From 40e09a6c17bad1bc35c57772cdf6bbebd3387a50 Mon Sep 17 00:00:00 2001 From: Yurii Lisovskyi Date: Sat, 26 Aug 2023 13:04:38 +0300 Subject: [PATCH 01/19] Add LAG hash seed test-case (#184) Signed-off-by: Yurii Lisovskyi --- common/sai.py | 2 +- tests/test_l2_basic.py | 186 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 187 insertions(+), 1 deletion(-) diff --git a/common/sai.py b/common/sai.py index bfb5b479..3b7fd28b 100644 --- a/common/sai.py +++ b/common/sai.py @@ -232,7 +232,7 @@ def set_loglevel(self, sai_api, loglevel): return self.sai_client.set_loglevel(sai_api, loglevel) # CRUD - def create(self, obj, attrs, do_assert=True): + def create(self, obj, attrs=[], do_assert=True): return self.sai_client.create(obj, attrs, do_assert) def remove(self, obj, do_assert=True): diff --git a/tests/test_l2_basic.py b/tests/test_l2_basic.py index 4cf963a8..33e6f82e 100644 --- a/tests/test_l2_basic.py +++ b/tests/test_l2_basic.py @@ -383,6 +383,192 @@ def test_l2_lag(npu, dataplane): npu.set(oid, ["SAI_PORT_ATTR_PORT_VLAN_ID", npu.default_vlan_id]) +def test_l2_lag_hash_seed(npu, dataplane): + """ + Description: + Check that the packets are equally divided among LAG members. + + Test scenario: + 1. Create a LAG group with 4 ports 1 through 4. + 2. Setup static FDB entries for the LAG and send packet to this destination MAC address. + 3. Send 100 packets with varying 5-tuple and check order/sequence of the distribution of packets received on ports 1 through 4. + 4. Change the LAG Hash seed value to 10 and compare the order/sequence of the distribution of packets received for the same set of 100 packets on ports 1 through 4. + 5. Verify that it is different after changing the hash seed. 
+ """ + vlan_id = "10" + mac = '00:11:11:11:11:11' + lag_mbr_num = 4 + lag_mbr_oids = [] + lag_hashseed_value = "10" + + # Remove bridge ports + for idx in range(lag_mbr_num): + npu.remove_vlan_member(npu.default_vlan_oid, npu.dot1q_bp_oids[idx]) + npu.remove(npu.dot1q_bp_oids[idx]) + + # Remove Port #4 from the default VLAN + npu.remove_vlan_member(npu.default_vlan_oid, npu.dot1q_bp_oids[lag_mbr_num]) + + # Create LAG + lag_oid = npu.create(SaiObjType.LAG) + + # Create LAG members + for idx in range(lag_mbr_num): + oid = npu.create(SaiObjType.LAG_MEMBER, + [ + "SAI_LAG_MEMBER_ATTR_LAG_ID", lag_oid, + "SAI_LAG_MEMBER_ATTR_PORT_ID", npu.port_oids[idx] + ]) + lag_mbr_oids.append(oid) + + # Create bridge port for LAG + lag_bp_oid = npu.create(SaiObjType.BRIDGE_PORT, + [ + "SAI_BRIDGE_PORT_ATTR_TYPE", "SAI_BRIDGE_PORT_TYPE_PORT", + "SAI_BRIDGE_PORT_ATTR_PORT_ID", lag_oid, + #"SAI_BRIDGE_PORT_ATTR_BRIDGE_ID", npu.dot1q_br_oid, + "SAI_BRIDGE_PORT_ATTR_ADMIN_STATE", "true" + ]) + + # Create VLAN + vlan_oid = npu.create(SaiObjType.VLAN, ["SAI_VLAN_ATTR_VLAN_ID", vlan_id]) + + # Create VLAN members + npu.create_vlan_member(vlan_oid, lag_bp_oid, "SAI_VLAN_TAGGING_MODE_UNTAGGED") + npu.create_vlan_member(vlan_oid, npu.dot1q_bp_oids[lag_mbr_num], "SAI_VLAN_TAGGING_MODE_UNTAGGED") + + # Set PVID for LAG and Port #4 + npu.set(npu.port_oids[lag_mbr_num], ["SAI_PORT_ATTR_PORT_VLAN_ID", vlan_id]) + npu.set(lag_oid, ["SAI_LAG_ATTR_PORT_VLAN_ID", vlan_id]) + + npu.create_fdb(vlan_oid, mac, lag_bp_oid) + + try: + if npu.run_traffic: + count1 = [0, 0, 0, 0] + laglist1 = list() + src_mac_start = '00:22:22:22:22:' + ip_src_start = '192.168.12.' + ip_dst_start = '10.10.10.' 
+ dport = 0x80 + sport = 0x1234 + max_itrs = 101 + + # Sending 100 packets to verify the order/sequence of distribution + for i in range(0, max_itrs): + src_mac = src_mac_start + str(i % 99).zfill(2) + ip_src = ip_src_start + str(i % 99).zfill(3) + ip_dst = ip_dst_start + str(i % 99).zfill(3) + + pkt = simple_tcp_packet(eth_dst=mac, + eth_src=src_mac, + ip_dst=ip_dst, + ip_src=ip_src, + tcp_sport=sport, + tcp_dport=dport, + ip_id=109, + ip_ttl=64) + + exp_pkt = simple_tcp_packet(eth_dst=mac, + eth_src=src_mac, + ip_dst=ip_dst, + ip_src=ip_src, + tcp_sport=sport, + tcp_dport=dport, + ip_id=109, + ip_ttl=64) + + send_packet(dataplane, lag_mbr_num, str(pkt)) + rcv_idx = verify_any_packet_any_port(dataplane, [exp_pkt], [0, 1, 2, 3]) + count1[rcv_idx] += 1 + laglist1.append(rcv_idx) + sport += 1 + dport += 1 + + npu.set(npu.switch_oid, ["SAI_SWITCH_ATTR_LAG_DEFAULT_HASH_SEED", lag_hashseed_value]) + + count2 = [0, 0, 0, 0] + laglist2 = list() + max_itrs = 101 + src_mac_start = '00:22:22:22:22:' + ip_src_start = '192.168.12.' + ip_dst_start = '10.10.10.' 
+ dport = 0x80 + sport = 0x1234 + + # Sending 100 packets to verify the order/sequence of distribution + for i in range(0, max_itrs): + src_mac = src_mac_start + str(i % 99).zfill(2) + ip_src = ip_src_start + str(i % 99).zfill(3) + ip_dst = ip_dst_start + str(i % 99).zfill(3) + + pkt = simple_tcp_packet(eth_dst=mac, + eth_src=src_mac, + ip_dst=ip_dst, + ip_src=ip_src, + tcp_sport=sport, + tcp_dport=dport, + ip_id=109, + ip_ttl=64) + + exp_pkt = simple_tcp_packet(eth_dst=mac, + eth_src=src_mac, + ip_dst=ip_dst, + ip_src=ip_src, + tcp_sport=sport, + tcp_dport=dport, + ip_id=109, + ip_ttl=64) + + send_packet(dataplane, lag_mbr_num, str(pkt)) + rcv_idx = verify_any_packet_any_port(dataplane, [exp_pkt], [0, 1, 2, 3]) + count2[rcv_idx] += 1 + laglist2.append(rcv_idx) + sport += 1 + dport += 1 + + order_check = 0 + for i in range(0, max_itrs): + if(laglist1[i] != laglist2[i]): + order_check += 1 + + assert order_check > 0, "Checking the difference in order/sequence before and after changing hash seed value: " + str(order_check) + + finally: + npu.flush_fdb_entries(npu.switch_oid, ["SAI_FDB_FLUSH_ATTR_BV_ID", vlan_oid, "SAI_FDB_FLUSH_ATTR_ENTRY_TYPE", "SAI_FDB_FLUSH_ENTRY_TYPE_ALL"]) + + npu.set(npu.switch_oid, ["SAI_SWITCH_ATTR_LAG_DEFAULT_HASH_SEED", "0"]) + + npu.remove_vlan_member(vlan_oid, lag_bp_oid) + npu.remove_vlan_member(vlan_oid, npu.dot1q_bp_oids[lag_mbr_num]) + npu.remove(vlan_oid) + + for oid in lag_mbr_oids: + npu.remove(oid) + + npu.remove(lag_bp_oid) + npu.remove(lag_oid) + + # Create bridge port for ports removed from LAG + for idx in range(lag_mbr_num): + bp_oid = npu.create(SaiObjType.BRIDGE_PORT, + [ + "SAI_BRIDGE_PORT_ATTR_TYPE", "SAI_BRIDGE_PORT_TYPE_PORT", + "SAI_BRIDGE_PORT_ATTR_PORT_ID", npu.port_oids[idx], + #"SAI_BRIDGE_PORT_ATTR_BRIDGE_ID", npu.dot1q_br_oid, + "SAI_BRIDGE_PORT_ATTR_ADMIN_STATE", "true" + ]) + npu.dot1q_bp_oids[idx] = bp_oid + + # Add ports to default VLAN + for oid in npu.dot1q_bp_oids[0:lag_mbr_num+1]: + 
npu.create_vlan_member(npu.default_vlan_oid, oid, "SAI_VLAN_TAGGING_MODE_UNTAGGED") + + # Set PVID + for oid in npu.port_oids[0:lag_mbr_num+1]: + npu.set(oid, ["SAI_PORT_ATTR_PORT_VLAN_ID", npu.default_vlan_id]) + + def test_l2_vlan_bcast_ucast(npu, dataplane): """ Description: From 0818e8434c4ff1a194fc32eb2395b25d37fb83e8 Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Tue, 5 Sep 2023 19:42:04 +0300 Subject: [PATCH 02/19] Fixed u8/u16 and range Thrift conversions (#188) Signed-off-by: Andriy Kokhan --- .github/workflows/sc-standalone-deb10.yml | 5 +++ .github/workflows/sc-standalone-deb11.yml | 5 +++ .../sai_thrift_client/sai_thrift_utils.py | 34 +++++++++++++++++-- 3 files changed, 41 insertions(+), 3 deletions(-) diff --git a/.github/workflows/sc-standalone-deb10.yml b/.github/workflows/sc-standalone-deb10.yml index 440ec73d..e2b8774b 100644 --- a/.github/workflows/sc-standalone-deb10.yml +++ b/.github/workflows/sc-standalone-deb10.yml @@ -95,5 +95,10 @@ jobs: run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py + - name: Run thrift unit tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ + "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" - name: Run thrift sairedis tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "test_sairec" + - name: Run thrift API tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "api/test" diff --git a/.github/workflows/sc-standalone-deb11.yml b/.github/workflows/sc-standalone-deb11.yml index 173e05ae..5111de48 100644 --- a/.github/workflows/sc-standalone-deb11.yml +++ b/.github/workflows/sc-standalone-deb11.yml @@ -95,5 +95,10 @@ jobs: run: 
./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py + - name: Run thrift unit tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ + "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" - name: Run thrift sairedis tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "test_sairec" + - name: Run thrift API tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "api/test" diff --git a/common/sai_client/sai_thrift_client/sai_thrift_utils.py b/common/sai_client/sai_thrift_client/sai_thrift_utils.py index 5351ea23..e7152fa2 100644 --- a/common/sai_client/sai_thrift_client/sai_thrift_utils.py +++ b/common/sai_client/sai_thrift_client/sai_thrift_utils.py @@ -57,6 +57,24 @@ def convert_attribute_name_to_thrift(attr): """ return re.search('SAI_.*_ATTR_(.*)', attr).group(1).lower() + @staticmethod + def convert_u8_to_thrift(u8_str): + # Thrift does not support unsigned int notation. + # The values gt than 0x7F should be converted into the signed values. + value = int(u8_str, 0) + if value > 0x7F: + return -((~value & 0xFF) + 1) + return value + + @staticmethod + def convert_u16_to_thrift(u16_str): + # Thrift does not support unsigned int notation. + # The values gt than 0x7FFF should be converted into the signed values. 
+ value = int(u16_str, 0) + if value > 0x7FFF: + return -((~value & 0xFFFF) + 1) + return value + @staticmethod def convert_value_to_thrift(value, attr_name=None, value_type=None): """ @@ -73,7 +91,13 @@ def convert_value_to_thrift(value, attr_name=None, value_type=None): actual_value = getattr(sai_headers, value, None) if actual_value != None: return actual_value - return 0 if value == '' else int(value, 0) + if value == '': + return 0 + if value_type == 'u8': + return ThriftConverter.convert_u8_to_thrift(value) + elif value_type == 'u16': + return ThriftConverter.convert_u16_to_thrift(value) + return int(value, 0) if value_type in [ 'booldata' ]: return value.lower() == "true" or value == "0" elif value_type in [ 'mac', 'ipv4', 'ipv6', 'chardata' ]: @@ -163,7 +187,7 @@ def sai_int_range(value_type, range): """ splitted = range.split(',') sai_thrift_class = getattr(ttypes, 'sai_thrift_{}_range_t'.format(value_type[:-5])) - return sai_thrift_class(min=splitted[0], max=splitted[1]) + return sai_thrift_class(min=int(splitted[0]), max=int(splitted[1])) @staticmethod def sai_qos_map_params(value): @@ -373,11 +397,15 @@ def convert_value_from_thrift(value, attr_name, obj_type=None): sai_thrift_ip_address_t('192.168.0.1'...), "ipaddr" => "192.168.0.1" """ value_type = ThriftConverter.get_attribute_type(attr_name) - if value_type in [ 's8', 'u8', 's16', 'u16', + if value_type in [ 's8', 's16', 'u32', 's64', 'u64', 'ptr', 'mac', 'ipv4', 'ipv6', 'chardata' ]: return str(value) + elif value_type == 'u8': + return str(value) if value > 0 else str(value & 0xFF) + elif value_type == 'u16': + return str(value) if value > 0 else str(value & 0xFFFF) elif value_type in [ 's32' ]: actual_value = ThriftConverter.get_str_by_enum(obj_type, attr_name, value) if actual_value != None: From 03aa142a854151342435ab9dccf5719c55671369 Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Wed, 6 Sep 2023 17:46:48 +0300 Subject: [PATCH 03/19] Updated PTF use-case. 
Enabled PTF TCs run from CI/CD (#189) Signed-off-by: Andriy Kokhan --- .github/workflows/sc-standalone-deb10.yml | 3 ++ .github/workflows/sc-standalone-deb11.yml | 3 ++ usecases/sai-ptf/README.md | 20 +++---------- .../sai-ptf/{ptf-conftest.py => conftest.py} | 30 +++++++++++++++++-- 4 files changed, 37 insertions(+), 19 deletions(-) rename usecases/sai-ptf/{ptf-conftest.py => conftest.py} (55%) diff --git a/.github/workflows/sc-standalone-deb10.yml b/.github/workflows/sc-standalone-deb10.yml index e2b8774b..0e533969 100644 --- a/.github/workflows/sc-standalone-deb10.yml +++ b/.github/workflows/sc-standalone-deb10.yml @@ -102,3 +102,6 @@ jobs: run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "test_sairec" - name: Run thrift API tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "api/test" + + - name: Run PTF tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ../usecases/sai-ptf/SAI/ptf/saifdb.py -k FdbAttributeTest diff --git a/.github/workflows/sc-standalone-deb11.yml b/.github/workflows/sc-standalone-deb11.yml index 5111de48..a8c1945d 100644 --- a/.github/workflows/sc-standalone-deb11.yml +++ b/.github/workflows/sc-standalone-deb11.yml @@ -102,3 +102,6 @@ jobs: run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "test_sairec" - name: Run thrift API tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "api/test" + + - name: Run PTF tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ../usecases/sai-ptf/SAI/ptf/saifdb.py -k FdbAttributeTest diff --git a/usecases/sai-ptf/README.md b/usecases/sai-ptf/README.md index eebab1f9..a3693395 100644 --- a/usecases/sai-ptf/README.md +++ b/usecases/sai-ptf/README.md @@ -6,29 +6,22 @@ SAI Challenger has capability to run these tests by setting up proper test envir # Steps to run tests -0. Setup the environment +1. 
Setup the environment ``` git submodule update --init -cp usecases/sai-ptf/ptf-conftest.py usecases/sai-ptf/SAI/ptf/conftest.py -cp usecases/sai-ptf/patches/0001-sai-base-test.patch usecases/sai-ptf/SAI/ -cd usecases/sai-ptf/SAI/ && patch -p1 < 0001-sai-base-test.patch && cd - ``` -1. Build a Docker image with required test env +2. Build a Docker image with a required test environment. + This step is optional. The image can be implicitly pulled from DockerHub by `run.sh`. ``` ./build.sh -s thrift ``` -2. Start a container based on newly built image +3. Start a Docker container ``` ./run.sh -s thrift ``` -3. Login into the container -``` -docker exec -ti sc-thrift-trident2-saivs-run bash -``` - 4. Run a test @@ -39,11 +32,6 @@ To run PTF test case: pytest --testbed=saivs_thrift_standalone ../usecases/sai-ptf/SAI/ptf/saifdb.py -k FdbAttributeTest -v ``` -To clean-up `saiserver` for `saivs` target after test case execution: -``` -supervisorctl restart saiserver -``` - To run SAI Challenger test case using Thrift RPC: ``` pytest --testbed=saivs_thrift_standalone -k "access_to_access" -v diff --git a/usecases/sai-ptf/ptf-conftest.py b/usecases/sai-ptf/conftest.py similarity index 55% rename from usecases/sai-ptf/ptf-conftest.py rename to usecases/sai-ptf/conftest.py index 0af18404..be11ebec 100644 --- a/usecases/sai-ptf/ptf-conftest.py +++ b/usecases/sai-ptf/conftest.py @@ -1,18 +1,42 @@ import sys import pytest +import subprocess from saichallenger.common.sai_testbed import SaiTestbedMeta +sys.path.insert(0, '/sai-challenger/ptf/src') -def import_base_modules(): - sys.path.insert(0, '/sai-challenger/ptf/src') -import_base_modules() +@pytest.hookimpl(tryfirst=True) +def pytest_sessionstart(session): + patch_file = "/sai-challenger/usecases/sai-ptf/patches/0001-sai-base-test.patch" + target_directory = "/sai-challenger/usecases/sai-ptf/SAI/" + + try: + command = ["patch", "--dry-run", "--silent", "-N", "-p1", "-i", patch_file, "-d", target_directory] + result = 
subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) + if result.returncode == 0: + subprocess.run(["patch", "-p1", "-i", patch_file, "-d", target_directory], check=True) + elif result.returncode == 1: + # The patch is already applied + return + else: + raise RuntimeError(f"Failed to check whether the patch is already applied: {result}") + except Exception as e: + raise RuntimeError(f"Failed to apply the patch: {e}") @pytest.fixture(scope="session", autouse=True) def set_ptf_params(request): if request.config.option.testbed: tb_params = SaiTestbedMeta("/sai-challenger", request.config.option.testbed) + if tb_params.config['npu'][0]['target'] == 'saivs' and \ + tb_params.config['npu'][0]['client']['config']['ip'] in ['localhost', '127.0.0.1']: + try: + # Clean-up saiserver after previous test session + subprocess.run(["supervisorctl", "restart", "saiserver"], check=True) + except Exception as e: + raise RuntimeError(f"Failed to apply the patch: {e}") + tb_params.generate_sai_ptf_config_files() ports = to_ptf_int_list(tb_params.config['dataplane'][0]['port_groups']) else: From 3ed52e8dcc81b56aba2d6187b4790bd61cea2043 Mon Sep 17 00:00:00 2001 From: Yurii Lisovskyi Date: Wed, 13 Sep 2023 17:37:38 +0300 Subject: [PATCH 04/19] Fix oper status check (#192) Signed-off-by: Yurii Lisovskyi --- common/sai_npu.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/common/sai_npu.py b/common/sai_npu.py index 5a380aeb..edb69c46 100644 --- a/common/sai_npu.py +++ b/common/sai_npu.py @@ -276,8 +276,7 @@ def set_sku_mode(self, sku): def assert_port_oper_up(self, port_oid, tout=15): for i in range(tout): - status, data = self.get(port_oid, ["SAI_PORT_ATTR_OPER_STATUS"]) - assert status == "SAI_STATUS_SUCCESS" + data = self.get(port_oid, ["SAI_PORT_ATTR_OPER_STATUS"]) if data.value() == "SAI_PORT_OPER_STATUS_UP": return if i + 1 < tout: From 479e5b310bc29ca09b2db13f2169be5b9a2bf53f Mon Sep 17 00:00:00 2001 From: vikumarks 
<119973184+vikumarks@users.noreply.github.com> Date: Thu, 14 Sep 2023 23:24:53 -0700 Subject: [PATCH 05/19] Added support for string SAI_NULL_OBJECT_ID for Thrift RPC (#194) Signed-off-by: Vinod Kumar --- common/sai_client/sai_thrift_client/sai_thrift_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/sai_client/sai_thrift_client/sai_thrift_utils.py b/common/sai_client/sai_thrift_client/sai_thrift_utils.py index e7152fa2..6f2b1093 100644 --- a/common/sai_client/sai_thrift_client/sai_thrift_utils.py +++ b/common/sai_client/sai_thrift_client/sai_thrift_utils.py @@ -362,7 +362,7 @@ def object_id(oid): "16" => 16 "oid:0x10" => 16 """ - if oid == None or oid == 'null': + if oid == None or oid == 'null' or oid == 'SAI_NULL_OBJECT_ID': return 0 if isinstance(oid, str) and oid.startswith('oid:0x'): return int(oid[4:], 16) From 89eb3401b12ad33d0499706df8f3868ffd47c07b Mon Sep 17 00:00:00 2001 From: Yurii Lisovskyi Date: Fri, 15 Sep 2023 16:54:22 +0300 Subject: [PATCH 06/19] Remove unnecessary adding BPs to default VLAN on init (#193) Signed-off-by: Yurii Lisovskyi --- common/sai_npu.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/common/sai_npu.py b/common/sai_npu.py index edb69c46..6f51bf16 100644 --- a/common/sai_npu.py +++ b/common/sai_npu.py @@ -207,11 +207,14 @@ def set_sku_mode(self, sku): # Remove existing ports num_ports = len(self.dot1q_bp_oids) for idx in range(num_ports): - self.remove_vlan_member(self.default_vlan_oid, self.dot1q_bp_oids[idx]) - self.remove(self.dot1q_bp_oids[idx]) - oid = self.get(self.port_oids[idx], ["SAI_PORT_ATTR_PORT_SERDES_ID"]).oid() - if oid != "oid:0x0": + oid = self.get_vlan_member(self.default_vlan_oid, self.dot1q_bp_oids[idx]) + if oid: self.remove(oid) + self.remove(self.dot1q_bp_oids[idx]) + status, data = self.get(self.port_oids[idx], ["SAI_PORT_ATTR_PORT_SERDES_ID"], do_assert=False) + serdes_oid = data.oid() + if status == "SAI_STATUS_SUCCESS" and serdes_oid != 
"oid:0x0": + self.remove(serdes_oid) self.remove(self.port_oids[idx]) self.port_oids.clear() self.dot1q_bp_oids.clear() From 12b9df10c7718b91597a3d9647a1ae73204ec6b2 Mon Sep 17 00:00:00 2001 From: vikumarks <119973184+vikumarks@users.noreply.github.com> Date: Sat, 16 Sep 2023 00:40:29 -0700 Subject: [PATCH 07/19] Adding new DASH TCs for create, set and remove API (#195) Signed-off-by: Vinod Kumar --- tests/api/test_dash_acl_group.py | 56 +++ tests/api/test_direction_lookup_entry.py | 54 +++ tests/api/test_eni.py | 489 ++++++++++++++++++++++ tests/api/test_inbound_routing_entry.py | 97 +++++ tests/api/test_outbound_ca_to_pa_entry.py | 124 ++++++ tests/api/test_pa_validation_entry.py | 79 ++++ tests/api/test_vip_entry.py | 55 +++ tests/api/test_vnet.py | 54 +++ 8 files changed, 1008 insertions(+) create mode 100644 tests/api/test_dash_acl_group.py create mode 100644 tests/api/test_direction_lookup_entry.py create mode 100644 tests/api/test_eni.py create mode 100644 tests/api/test_inbound_routing_entry.py create mode 100644 tests/api/test_outbound_ca_to_pa_entry.py create mode 100644 tests/api/test_pa_validation_entry.py create mode 100644 tests/api/test_vip_entry.py create mode 100644 tests/api/test_vnet.py diff --git a/tests/api/test_dash_acl_group.py b/tests/api/test_dash_acl_group.py new file mode 100644 index 00000000..3551a051 --- /dev/null +++ b/tests/api/test_dash_acl_group.py @@ -0,0 +1,56 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiDashAclGroup: + # object with no attributes + + def test_dash_acl_group_create(self, dpu): + #Attribs are not marked mandatory but if we dont gives it throws an error + commands = [ + { + 'name': 'dash_acl_group_1', + 'op': 'create', + 'type': 
'SAI_OBJECT_TYPE_DASH_ACL_GROUP', + 'attributes': ["SAI_DASH_ACL_GROUP_ATTR_IP_ADDR_FAMILY","SAI_IP_ADDR_FAMILY_IPV4"] + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_dash_acl_group_attr_ip_addr_family_set") + def test_sai_dash_acl_group_attr_ip_addr_family_set(self, dpu): + + commands = [ + { + "name": "dash_acl_group_1", + "op": "set", + "attributes": ["SAI_DASH_ACL_GROUP_ATTR_IP_ADDR_FAMILY", 'SAI_IP_ADDR_FAMILY_IPV4'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + + def test_dash_acl_group_remove(self, dpu): + + commands = [{'name': 'dash_acl_group_1', 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_direction_lookup_entry.py b/tests/api/test_direction_lookup_entry.py new file mode 100644 index 00000000..c94ace8d --- /dev/null +++ b/tests/api/test_direction_lookup_entry.py @@ -0,0 +1,54 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiDirectionLookupEntry: + # object with no attributes + + def test_direction_lookup_entry_create(self, dpu): + commands = [ + { + 'name': 'direction_lookup_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_DIRECTION_LOOKUP_ENTRY', + 'attributes': [], + 'key': {'switch_id': '$SWITCH_ID', 'vni': "2000"} + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_direction_lookup_entry_attr_action_set") + def 
test_sai_direction_lookup_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "direction_lookup_entry_1", + "op": "set", + "attributes": ["SAI_DIRECTION_LOOKUP_ENTRY_ATTR_ACTION", 'SAI_DIRECTION_LOOKUP_ENTRY_ACTION_SET_OUTBOUND_DIRECTION'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_direction_lookup_entry_remove(self, dpu): + + commands = [{'name': 'direction_lookup_entry_1', 'key': {'switch_id': '$SWITCH_ID', 'vni': '2000'}, 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_eni.py b/tests/api/test_eni.py new file mode 100644 index 00000000..e420e991 --- /dev/null +++ b/tests/api/test_eni.py @@ -0,0 +1,489 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiEni: + # object with no attributes + + def test_eni_create(self, dpu): + commands = [ + {"name": "vnet","op": "create","type": "SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + { + "name": "eni_1", + "op": "create", + "type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_eni_attr_cps_set') + def test_sai_eni_attr_cps_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_CPS', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= 
SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_eni_attr_pps_set') + def test_sai_eni_attr_pps_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_PPS', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_flows_set') + def test_sai_eni_attr_flows_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_FLOWS', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_admin_state_set') + def test_sai_eni_attr_admin_state_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ATTR_ADMIN_STATE', 'false'], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_vm_underlay_dip_set') + def test_sai_eni_attr_vm_underlay_dip_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ATTR_VM_UNDERLAY_DIP', '0.0.0.0'], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_vm_vni_set') + def test_sai_eni_attr_vm_vni_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_VM_VNI', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_vnet_id_set') + def test_sai_eni_attr_vnet_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ATTR_VNET_ID', 'null'], + } + ] + 
results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage4_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE4_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage5_dash_acl_group_id_set' 
+ ) + def test_sai_eni_attr_inbound_v4_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage4_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE4_DASH_ACL_GROUP_ID', + 
'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage5_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + 
name='test_sai_eni_attr_outbound_v4_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage4_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE4_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage5_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', 
+ 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage4_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE4_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage5_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + def test_eni_remove(self, dpu): + + commands = [ + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) \ No newline at end of file diff --git a/tests/api/test_inbound_routing_entry.py b/tests/api/test_inbound_routing_entry.py new file mode 100644 index 00000000..54b0ea54 --- /dev/null +++ b/tests/api/test_inbound_routing_entry.py @@ -0,0 +1,97 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiInboundRoutingEntry: + # object with no attributes + + def 
test_inbound_routing_entry_create(self, dpu): + + commands = [ + {"name": "vnet","op": "create","type": "SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + {"name": "eni_1","op": "create","type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + }, + {'name': 'inbound_routing_entry_1', 'op': 'create', 'type': 'SAI_OBJECT_TYPE_INBOUND_ROUTING_ENTRY', + 'attributes': [ + "SAI_INBOUND_ROUTING_ENTRY_ATTR_ACTION","SAI_INBOUND_ROUTING_ENTRY_ACTION_VXLAN_DECAP_PA_VALIDATE", + "SAI_INBOUND_ROUTING_ENTRY_ATTR_SRC_VNET_ID","$vnet" + ], + 'key': {'switch_id': '$SWITCH_ID', 'eni_id': "33", 'vni': '2000', 'sip': '1.1.1.1', 'sip_mask': '32', 'priority': '0'} + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_inbound_routing_entry_attr_action_set") + def test_sai_inbound_routing_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "inbound_routing_entry_1", + "op": "set", + "attributes": [ + "SAI_INBOUND_ROUTING_ENTRY_ATTR_ACTION", + 'SAI_INBOUND_ROUTING_ENTRY_ACTION_VXLAN_DECAP' + ], + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + @pytest.mark.dependency(name="test_sai_inbound_routing_entry_attr_src_vnet_id_set") + def test_sai_inbound_routing_entry_attr_src_vnet_id_set(self, dpu): + + commands = [ + { + "name": "inbound_routing_entry_1", + "op": "set", + "attributes": ["SAI_INBOUND_ROUTING_ENTRY_ATTR_SRC_VNET_ID", '0'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_inbound_routing_entry_remove(self, dpu): + + commands = [ + { + 'name': 'inbound_routing_entry_1', + 'op': 'remove', + 'key': + { + 
'switch_id': '$SWITCH_ID', + 'eni_id': '33', + 'vni': '2000', + 'sip': '1.1.1.1', + 'sip_mask': '32', + 'priority': '0' + }, + }, + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_outbound_ca_to_pa_entry.py b/tests/api/test_outbound_ca_to_pa_entry.py new file mode 100644 index 00000000..100b036e --- /dev/null +++ b/tests/api/test_outbound_ca_to_pa_entry.py @@ -0,0 +1,124 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiOutboundCaToPaEntry: + # object with no attributes + + def test_outbound_ca_to_pa_entry_create(self, dpu): + + commands = [ + { + "name": "vnet", + "op": "create", + "type": "SAI_OBJECT_TYPE_VNET", + "attributes": ["SAI_VNET_ATTR_VNI","2000"] + }, + { + 'name': 'outbound_ca_to_pa_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_OUTBOUND_CA_TO_PA_ENTRY', + 'attributes': [ + "SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_UNDERLAY_DIP","221.0.2.100", + "SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_OVERLAY_DMAC","00:1B:6E:00:00:01", + "SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_USE_DST_VNET_VNI","True", + ], + 'key': {'switch_id': '$SWITCH_ID', 'dst_vnet_id': '$vnet', 'dip': '1.128.0.1'} + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_underlay_dip_set") + def test_sai_outbound_ca_to_pa_entry_attr_underlay_dip_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_UNDERLAY_DIP", '0.0.0.0'] + } + ] + 
results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_overlay_dmac_set") + def test_sai_outbound_ca_to_pa_entry_attr_overlay_dmac_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_OVERLAY_DMAC", '0:0:0:0:0:0'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_use_dst_vnet_vni_set") + def test_sai_outbound_ca_to_pa_entry_attr_use_dst_vnet_vni_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_USE_DST_VNET_VNI", 'false'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_counter_id_set") + def test_sai_outbound_ca_to_pa_entry_attr_counter_id_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_COUNTER_ID", '0'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_outbound_ca_to_pa_entry_remove(self, dpu): + + commands = [ + { + 'name': 'outbound_ca_to_pa_entry_1', + 'op': 'remove', + 'key': + { + 'switch_id': '$SWITCH_ID', + 'dst_vnet_id': '$vnet', + 'dip': '1.128.0.1' + }, + }, + {"name": "vnet","op": "remove"}, + + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_pa_validation_entry.py b/tests/api/test_pa_validation_entry.py new file mode 100644 index 
00000000..118b8f7e --- /dev/null +++ b/tests/api/test_pa_validation_entry.py @@ -0,0 +1,79 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiPaValidationEntry: + # object with no attributes + + def test_pa_validation_entry_create(self, dpu): + + commands = [ + { + "name": "vnet", + "op": "create", + "type": "SAI_OBJECT_TYPE_VNET", + "attributes": ["SAI_VNET_ATTR_VNI","7000"] + }, + { + 'name': 'pa_validation_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_PA_VALIDATION_ENTRY', + 'attributes': [], + 'key': + { + 'switch_id': '$SWITCH_ID', + 'vnet_id': '$vnet', + 'sip': '1.1.1.1' + } + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_pa_validation_entry_attr_action_set") + def test_sai_pa_validation_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "pa_validation_entry_1", + "op": "set", + "attributes": [ + "SAI_PA_VALIDATION_ENTRY_ATTR_ACTION", + 'SAI_PA_VALIDATION_ENTRY_ACTION_PERMIT' + ], + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + def test_pa_validation_entry_remove(self, dpu): + + commands = [ + { + 'name': 'pa_validation_entry_1', + 'op': 'remove', + 'key': + { + 'switch_id': '$SWITCH_ID', + 'vnet_id': '$vnet', + 'sip': '1.1.1.1' + }, + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_vip_entry.py b/tests/api/test_vip_entry.py new file mode 100644 index 00000000..d59e619f --- /dev/null +++ b/tests/api/test_vip_entry.py @@ -0,0 +1,55 @@ + +from pprint import 
pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiVipEntry: + # object with no attributes + + def test_vip_entry_create(self, dpu): + + commands = [ + { + 'name': 'vip_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_VIP_ENTRY', + 'attributes': [], + 'key': {'switch_id': '$SWITCH_ID', 'vip': '1.2.1.1'} + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_vip_entry_attr_action_set") + def test_sai_vip_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "vip_entry_1", + "op": "set", + "attributes": ["SAI_VIP_ENTRY_ATTR_ACTION", 'SAI_VIP_ENTRY_ACTION_ACCEPT'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_vip_entry_remove(self, dpu): + + commands = [{'name': 'vip_entry_1', 'key': {'switch_id': '$SWITCH_ID', 'vip': '1.2.1.1'}, 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_vnet.py b/tests/api/test_vnet.py new file mode 100644 index 00000000..db35a3cf --- /dev/null +++ b/tests/api/test_vnet.py @@ -0,0 +1,54 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiVnet: + # object with no attributes + + def test_vnet_create(self, dpu): + + commands = [ + { + 'name': 'vnet_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_VNET', 
+ 'attributes': ["SAI_VNET_ATTR_VNI", '2001'] + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + @pytest.mark.dependency(name="test_sai_vnet_attr_vni_set") + def test_sai_vnet_attr_vni_set(self, dpu): + + commands = [ + { + "name": "vnet_1", + "op": "set", + "attributes": ["SAI_VNET_ATTR_VNI", '2001'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_vnet_remove(self, dpu): + + commands = [{'name': 'vnet_1', 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + From 37cacece8bc3b55d2b8e13311a7136ecc391e21e Mon Sep 17 00:00:00 2001 From: vikumarks <119973184+vikumarks@users.noreply.github.com> Date: Sat, 16 Sep 2023 00:44:38 -0700 Subject: [PATCH 08/19] Adding new DASH TCs for create, set and remove API 2nd set (#197) Signed-off-by: Vinod Kumar --- tests/api/test_dash_acl_rule.py | 68 +++++++++ tests/api/test_eni_ether_address_map_entry.py | 71 ++++++++++ tests/api/test_outbound_routing_entry.py | 132 ++++++++++++++++++ 3 files changed, 271 insertions(+) create mode 100644 tests/api/test_dash_acl_rule.py create mode 100644 tests/api/test_eni_ether_address_map_entry.py create mode 100644 tests/api/test_outbound_routing_entry.py diff --git a/tests/api/test_dash_acl_rule.py b/tests/api/test_dash_acl_rule.py new file mode 100644 index 00000000..2cfd3986 --- /dev/null +++ b/tests/api/test_dash_acl_rule.py @@ -0,0 +1,68 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiDashAclRule: + # object with parent SAI_OBJECT_TYPE_DASH_ACL_GROUP + + def 
test_dash_acl_rule_create(self, dpu): + commands = [ + { + 'name': 'dash_acl_group_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_DASH_ACL_GROUP', + 'attributes': ['SAI_DASH_ACL_GROUP_ATTR_IP_ADDR_FAMILY', 'SAI_IP_ADDR_FAMILY_IPV4',], + }, + { + 'name': 'dash_acl_rule_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_DASH_ACL_RULE', + 'attributes': [ + 'SAI_DASH_ACL_RULE_ATTR_DASH_ACL_GROUP_ID','$dash_acl_group_1', + 'SAI_DASH_ACL_RULE_ATTR_DIP','1.1.1.1', + 'SAI_DASH_ACL_RULE_ATTR_SIP','2.2.2.2', + 'SAI_DASH_ACL_RULE_ATTR_PROTOCOL','17', + 'SAI_DASH_ACL_RULE_ATTR_SRC_PORT','5678', + 'SAI_DASH_ACL_RULE_ATTR_DST_PORT','8765', + 'SAI_DASH_ACL_RULE_ATTR_PRIORITY','10', + ], + }, + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_dash_acl_rule_attr_action_set') + def test_sai_dash_acl_rule_attr_action_set(self, dpu): + commands = [ + { + 'name': 'dash_acl_rule_1', + 'op': 'set', + 'attributes': [ + 'SAI_DASH_ACL_RULE_ATTR_ACTION', + 'SAI_DASH_ACL_RULE_ACTION_PERMIT', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values set =======') + pprint(results) + + + def test_dash_acl_rule_remove(self, dpu): + commands = [ + {'name': 'dash_acl_rule_1', 'op': 'remove'}, + {'name': 'dash_acl_group_1', 'op': 'remove'}, + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) diff --git a/tests/api/test_eni_ether_address_map_entry.py b/tests/api/test_eni_ether_address_map_entry.py new file mode 100644 index 00000000..359c7788 --- /dev/null +++ b/tests/api/test_eni_ether_address_map_entry.py @@ -0,0 +1,71 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for 
\"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiEniEtherAddressMapEntry: + # object with no attributes + + def test_eni_ether_address_map_entry_create(self, dpu): + commands = [ + {"name": "vnet","op": "create","type": "SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + { + "name": "eni_1", + "op": "create", + "type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + }, + { + 'name': 'eni_ether_address_map_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_ENI_ETHER_ADDRESS_MAP_ENTRY', + 'attributes': ["SAI_ENI_ETHER_ADDRESS_MAP_ENTRY_ATTR_ENI_ID","$eni_1"], + 'key': {'switch_id': '$SWITCH_ID', 'address': '00:AA:AA:AA:AB:00'}, + } + ] + + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values create =======\n') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_ether_address_map_entry_attr_eni_id_set') + def test_sai_eni_ether_address_map_entry_attr_eni_id_set(self, dpu): + commands = [ + { + 'name': 'eni_ether_address_map_entry_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ETHER_ADDRESS_MAP_ENTRY_ATTR_ENI_ID', 'null'], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + def test_eni_ether_address_map_entry_remove(self, dpu): + commands = [ + + { + 'name': 'eni_ether_address_map_entry_1', + 'key': {'switch_id': '$SWITCH_ID', 'address': '00:AA:AA:AA:AB:00'}, + 'op': 'remove', + }, + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + ] + + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values remove =======\n') + pprint(results) + diff --git a/tests/api/test_outbound_routing_entry.py b/tests/api/test_outbound_routing_entry.py new file mode 100644 index 00000000..be7b5af0 --- /dev/null +++ 
b/tests/api/test_outbound_routing_entry.py @@ -0,0 +1,132 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + + +@pytest.mark.dpu +class TestSaiOutboundRoutingEntry: + # object with no attributes + + def test_outbound_routing_entry_create(self, dpu): + commands = [ + {"name": "vnet","op": "create","type": "SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + { + "name": "eni_1", + "op": "create", + "type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + }, + { + 'name': 'outbound_routing_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_OUTBOUND_ROUTING_ENTRY', + 'attributes': ["SAI_OUTBOUND_ROUTING_ENTRY_ATTR_ACTION", "SAI_OUTBOUND_ROUTING_ENTRY_ACTION_ROUTE_VNET","SAI_OUTBOUND_ROUTING_ENTRY_ATTR_DST_VNET_ID", "$vnet"], + 'key': { + 'switch_id': '$SWITCH_ID', + 'eni_id': '$eni_1', + 'destination': '10.1.0.0/16', + }, + } + ] + + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values create =======\n') + pprint(results) + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_action_set') + def test_sai_outbound_routing_entry_attr_action_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': [ + 'SAI_OUTBOUND_ROUTING_ENTRY_ATTR_ACTION', + 'SAI_OUTBOUND_ROUTING_ENTRY_ACTION_ROUTE_VNET', + "SAI_OUTBOUND_ROUTING_ENTRY_ATTR_DST_VNET_ID", "$vnet" + ], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_dst_vnet_id_set') + def 
test_sai_outbound_routing_entry_attr_dst_vnet_id_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': [ + 'SAI_OUTBOUND_ROUTING_ENTRY_ATTR_ACTION', + 'SAI_OUTBOUND_ROUTING_ENTRY_ACTION_ROUTE_VNET', + "SAI_OUTBOUND_ROUTING_ENTRY_ATTR_DST_VNET_ID", "$vnet" + ], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_overlay_ip_set') + def test_sai_outbound_routing_entry_attr_overlay_ip_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': ['SAI_OUTBOUND_ROUTING_ENTRY_ATTR_OVERLAY_IP', '0.0.0.0'], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_counter_id_set') + def test_sai_outbound_routing_entry_attr_counter_id_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': ['SAI_OUTBOUND_ROUTING_ENTRY_ATTR_COUNTER_ID', 'null'], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + + def test_outbound_routing_entry_remove(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'key': { + 'switch_id': '$SWITCH_ID', + 'eni_id': '$eni_1', + 'destination': '10.1.0.0/16', + }, + 'op': 'remove', + }, + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) From eda4f6425bebac1eaa9e19aace0f88167b73654b Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Sat, 16 Sep 2023 13:05:55 +0300 Subject: [PATCH 09/19] Fixed set_sku_mode() (#198) Signed-off-by: Andriy Kokhan --- 
common/sai_npu.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/common/sai_npu.py b/common/sai_npu.py index 6f51bf16..cf364876 100644 --- a/common/sai_npu.py +++ b/common/sai_npu.py @@ -212,9 +212,8 @@ def set_sku_mode(self, sku): self.remove(oid) self.remove(self.dot1q_bp_oids[idx]) status, data = self.get(self.port_oids[idx], ["SAI_PORT_ATTR_PORT_SERDES_ID"], do_assert=False) - serdes_oid = data.oid() - if status == "SAI_STATUS_SUCCESS" and serdes_oid != "oid:0x0": - self.remove(serdes_oid) + if status == "SAI_STATUS_SUCCESS" and data.oid() != "oid:0x0": + self.remove(data.oid()) self.remove(self.port_oids[idx]) self.port_oids.clear() self.dot1q_bp_oids.clear() From f66079741e64399101a3560d7687967d07120f97 Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Sun, 17 Sep 2023 16:25:45 +0300 Subject: [PATCH 10/19] Added FDB basic UTs (#199) Signed-off-by: Andriy Kokhan --- .github/workflows/sc-client-server-deb10.yml | 4 +- .github/workflows/sc-client-server-deb11.yml | 4 +- .github/workflows/sc-standalone-deb10.yml | 7 +- .github/workflows/sc-standalone-deb11.yml | 7 +- common/sai_npu.py | 12 +-- tests/ut/test_fdb_ut.py | 91 ++++++++++++++++++++ 6 files changed, 112 insertions(+), 13 deletions(-) create mode 100644 tests/ut/test_fdb_ut.py diff --git a/.github/workflows/sc-client-server-deb10.yml b/.github/workflows/sc-client-server-deb10.yml index 53bfa247..0f04fe09 100644 --- a/.github/workflows/sc-client-server-deb10.yml +++ b/.github/workflows/sc-client-server-deb10.yml @@ -156,8 +156,10 @@ jobs: - name: Run functional test cases run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k "test_l2_basic" + - name: Run unit tests + run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py - name: Run unit tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k \ - "test_acl_ut or 
test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + "test_switch_ut and not sai_map_list_t" - name: Run thift data-driven tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v test_l2_basic_dd.py diff --git a/.github/workflows/sc-client-server-deb11.yml b/.github/workflows/sc-client-server-deb11.yml index 389b3342..5246b189 100644 --- a/.github/workflows/sc-client-server-deb11.yml +++ b/.github/workflows/sc-client-server-deb11.yml @@ -155,8 +155,10 @@ jobs: - name: Run functional test cases run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k "test_l2_basic" + - name: Run unit tests + run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py - name: Run unit tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k \ - "test_acl_ut or test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + "test_switch_ut and not sai_map_list_t" - name: Run thift data-driven tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v test_l2_basic_dd.py diff --git a/.github/workflows/sc-standalone-deb10.yml b/.github/workflows/sc-standalone-deb10.yml index 0e533969..93e86819 100644 --- a/.github/workflows/sc-standalone-deb10.yml +++ b/.github/workflows/sc-standalone-deb10.yml @@ -72,8 +72,9 @@ jobs: - name: Run sairedis tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_sairec" - name: Run unit tests - run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k \ - "test_acl_ut or test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py + - name: Run unit tests + run: ./exec.sh 
--no-tty pytest --testbed=saivs_standalone -v -k "test_switch_ut and not sai_map_list_t" - name: Run data-driven tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v test_l2_basic_dd.py - name: Run API tests @@ -94,7 +95,7 @@ jobs: - name: Run thift data-driven tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests - run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py - name: Run thrift unit tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" diff --git a/.github/workflows/sc-standalone-deb11.yml b/.github/workflows/sc-standalone-deb11.yml index a8c1945d..6e0b38fa 100644 --- a/.github/workflows/sc-standalone-deb11.yml +++ b/.github/workflows/sc-standalone-deb11.yml @@ -72,8 +72,9 @@ jobs: - name: Run sairedis tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_sairec" - name: Run unit tests - run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k \ - "test_acl_ut or test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py + - name: Run unit tests + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_switch_ut and not sai_map_list_t" - name: Run data-driven tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v test_l2_basic_dd.py - name: Run API tests @@ -94,7 +95,7 @@ jobs: - name: Run thift data-driven tests run: ./exec.sh --no-tty 
-s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests - run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py - name: Run thrift unit tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" diff --git a/common/sai_npu.py b/common/sai_npu.py index cf364876..65805dd8 100644 --- a/common/sai_npu.py +++ b/common/sai_npu.py @@ -100,8 +100,9 @@ def reset(self): attr = [] self.init(attr) - def create_fdb(self, vlan_oid, mac, bp_oid, action="SAI_PACKET_ACTION_FORWARD"): - self.create('SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( + def create_fdb(self, vlan_oid, mac, bp_oid, entry_type="SAI_FDB_ENTRY_TYPE_STATIC", action="SAI_PACKET_ACTION_FORWARD", do_assert=True): + return self.create( + 'SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( { "bvid" : vlan_oid, "mac" : mac, @@ -109,13 +110,14 @@ def create_fdb(self, vlan_oid, mac, bp_oid, action="SAI_PACKET_ACTION_FORWARD"): } ), [ - "SAI_FDB_ENTRY_ATTR_TYPE", "SAI_FDB_ENTRY_TYPE_STATIC", + "SAI_FDB_ENTRY_ATTR_TYPE", entry_type, "SAI_FDB_ENTRY_ATTR_BRIDGE_PORT_ID", bp_oid, "SAI_FDB_ENTRY_ATTR_PACKET_ACTION", action - ]) + ], + do_assert) def remove_fdb(self, vlan_oid, mac, do_assert=True): - self.remove('SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( + return self.remove('SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( { "bvid" : vlan_oid, "mac" : mac, diff --git a/tests/ut/test_fdb_ut.py b/tests/ut/test_fdb_ut.py new file mode 100644 index 00000000..46b4ecd3 --- /dev/null +++ b/tests/ut/test_fdb_ut.py @@ -0,0 +1,91 @@ +import pytest +import json +from saichallenger.common.sai_data import SaiObjType + + 
+@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.npu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + + +class TestFdbEntry: + state = dict() + mac = "00:00:11:22:33:44" + + @classmethod + def key(cls, npu, bvid, mac=None): + key = 'SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( + { + "bvid" : bvid, + "mac" : mac if mac else cls.mac, + "switch_id" : npu.switch_oid + } + ) + return key + + @pytest.mark.dependency() + def test_create_dynamic(self, npu): + npu.create_fdb(npu.default_vlan_oid, TestFdbEntry.mac, npu.dot1q_bp_oids[0], "SAI_FDB_ENTRY_TYPE_DYNAMIC") + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_create_duplicated_dynamic(self, npu): + status, _ = npu.create_fdb(npu.default_vlan_oid, TestFdbEntry.mac, npu.dot1q_bp_oids[0], "SAI_FDB_ENTRY_TYPE_DYNAMIC", do_assert=False) + assert status == "SAI_STATUS_ITEM_ALREADY_EXISTS" + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_create_duplicated_static(self, npu): + status, _ = npu.create_fdb(npu.default_vlan_oid, TestFdbEntry.mac, npu.dot1q_bp_oids[0], "SAI_FDB_ENTRY_TYPE_STATIC", do_assert=False) + assert status == "SAI_STATUS_ITEM_ALREADY_EXISTS" + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_change_to_static(self, npu): + npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_TYPE", "SAI_FDB_ENTRY_TYPE_STATIC"]) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_change_to_static']) + def test_change_to_dynamic(self, npu): + npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_TYPE", "SAI_FDB_ENTRY_TYPE_DYNAMIC"]) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_default_action(self, npu): + data = npu.get(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_PACKET_ACTION", ""]) + assert 
data.value() == "SAI_PACKET_ACTION_FORWARD" + self.state["SAI_FDB_ENTRY_ATTR_PACKET_ACTION"] = data.value() + + @pytest.mark.parametrize( + "action", + [ + ("SAI_PACKET_ACTION_DROP"), + ("SAI_PACKET_ACTION_DONOTDROP"), + ("SAI_PACKET_ACTION_COPY"), + ("SAI_PACKET_ACTION_COPY_CANCEL"), + ("SAI_PACKET_ACTION_TRAP"), + ("SAI_PACKET_ACTION_LOG"), + ("SAI_PACKET_ACTION_DENY"), + ("SAI_PACKET_ACTION_TRANSIT"), + ("SAI_PACKET_ACTION_FORWARD") + ] + ) + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_set_action(self, npu, action): + attr = "SAI_FDB_ENTRY_ATTR_PACKET_ACTION" + status = npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), + [attr, action], do_assert=False) + npu.assert_status_success(status) + data = npu.get(TestFdbEntry.key(npu, npu.default_vlan_oid), [attr, ""]) + assert data.value() == action + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_no_bridge_port(self, npu): + npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_BRIDGE_PORT_ID", "oid:0x0"]) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_remove_dynamic(self, npu): + npu.remove_fdb(npu.default_vlan_oid, TestFdbEntry.mac) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_duplicated_remove(self, npu): + status = npu.remove_fdb(npu.default_vlan_oid, TestFdbEntry.mac, do_assert=False) + assert status == "SAI_STATUS_ITEM_NOT_FOUND" + From e6d10a4a78cf044cdf6cd3a6582a339ffdd6fe58 Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Tue, 19 Sep 2023 21:59:29 +0300 Subject: [PATCH 11/19] Added LAG basic UTs (#200) Signed-off-by: Andriy Kokhan --- .github/workflows/sc-client-server-deb10.yml | 2 +- .github/workflows/sc-client-server-deb11.yml | 2 +- .github/workflows/sc-standalone-deb10.yml | 4 +- .github/workflows/sc-standalone-deb11.yml | 4 +- tests/ut/test_lag_ut.py | 116 +++++++++++++++++++ 5 files changed, 122 insertions(+), 6 deletions(-) 
create mode 100644 tests/ut/test_lag_ut.py diff --git a/.github/workflows/sc-client-server-deb10.yml b/.github/workflows/sc-client-server-deb10.yml index 0f04fe09..53626639 100644 --- a/.github/workflows/sc-client-server-deb10.yml +++ b/.github/workflows/sc-client-server-deb10.yml @@ -157,7 +157,7 @@ jobs: - name: Run functional test cases run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k "test_l2_basic" - name: Run unit tests - run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py + run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run unit tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k \ "test_switch_ut and not sai_map_list_t" diff --git a/.github/workflows/sc-client-server-deb11.yml b/.github/workflows/sc-client-server-deb11.yml index 5246b189..1a332b11 100644 --- a/.github/workflows/sc-client-server-deb11.yml +++ b/.github/workflows/sc-client-server-deb11.yml @@ -156,7 +156,7 @@ jobs: - name: Run functional test cases run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k "test_l2_basic" - name: Run unit tests - run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py + run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run unit tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k \ "test_switch_ut and not sai_map_list_t" diff --git a/.github/workflows/sc-standalone-deb10.yml b/.github/workflows/sc-standalone-deb10.yml index 93e86819..57688b62 
100644 --- a/.github/workflows/sc-standalone-deb10.yml +++ b/.github/workflows/sc-standalone-deb10.yml @@ -72,7 +72,7 @@ jobs: - name: Run sairedis tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_sairec" - name: Run unit tests - run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run unit tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_switch_ut and not sai_map_list_t" - name: Run data-driven tests @@ -95,7 +95,7 @@ jobs: - name: Run thift data-driven tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests - run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run thrift unit tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" diff --git a/.github/workflows/sc-standalone-deb11.yml b/.github/workflows/sc-standalone-deb11.yml index 6e0b38fa..f237ae68 100644 --- a/.github/workflows/sc-standalone-deb11.yml +++ b/.github/workflows/sc-standalone-deb11.yml @@ -72,7 +72,7 @@ jobs: - name: Run sairedis tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_sairec" - name: Run unit tests - run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py 
ut/test_port_ut.py ut/test_fdb_ut.py + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run unit tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_switch_ut and not sai_map_list_t" - name: Run data-driven tests @@ -95,7 +95,7 @@ jobs: - name: Run thift data-driven tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests - run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run thrift unit tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" diff --git a/tests/ut/test_lag_ut.py b/tests/ut/test_lag_ut.py new file mode 100644 index 00000000..84cb4737 --- /dev/null +++ b/tests/ut/test_lag_ut.py @@ -0,0 +1,116 @@ +import pytest +from saichallenger.common.sai import Sai +from saichallenger.common.sai_data import SaiObjType + +lag_attrs = Sai.get_obj_attrs("SAI_OBJECT_TYPE_LAG") +lag_mbr_attrs = Sai.get_obj_attrs("SAI_OBJECT_TYPE_LAG_MEMBER") + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.npu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + + +class TestLag: + oid = None + lag_mbr_num = 2 + lag_mbr_oids = [] + + @pytest.mark.dependency() + def test_create(self, npu): + TestLag.oid = npu.create(SaiObjType.LAG) + + @pytest.mark.parametrize( + "attr,attr_type", + lag_attrs + ) + 
@pytest.mark.dependency(depends=['TestLag::test_create']) + def test_get_attr(self, npu, attr, attr_type): + if attr == "SAI_LAG_ATTR_SYSTEM_PORT_AGGREGATE_ID": + pytest.skip("Valid for SAI_SWITCH_TYPE_VOQ only") + status, data = npu.get_by_type(TestLag.oid, attr, attr_type, False) + npu.assert_status_success(status) + if attr == "SAI_LAG_ATTR_PORT_LIST": + assert len(data.to_list()) == 0 + elif attr == "SAI_LAG_ATTR_PORT_VLAN_ID": + assert data.value() == npu.default_vlan_id + elif attr in ["SAI_LAG_ATTR_DROP_UNTAGGED", "SAI_LAG_ATTR_DROP_TAGGED"]: + assert data.value() == "false" + + @pytest.mark.dependency(depends=['TestLag::test_create']) + def test_create_members(self, npu): + # Remove bridge ports + for idx in range(TestLag.lag_mbr_num): + npu.remove_vlan_member(npu.default_vlan_oid, npu.dot1q_bp_oids[idx]) + npu.remove(npu.dot1q_bp_oids[idx]) + + # Create LAG members + for idx in range(TestLag.lag_mbr_num): + oid = npu.create(SaiObjType.LAG_MEMBER, + [ + "SAI_LAG_MEMBER_ATTR_LAG_ID", TestLag.oid, + "SAI_LAG_MEMBER_ATTR_PORT_ID", npu.port_oids[idx] + ]) + TestLag.lag_mbr_oids.append(oid) + + @pytest.mark.parametrize( + "attr,attr_type", + lag_mbr_attrs + ) + @pytest.mark.dependency(depends=['TestLag::test_create_members']) + def test_get_member_attr(self, npu, attr, attr_type): + status, data = npu.get_by_type(TestLag.lag_mbr_oids[0], attr, attr_type, False) + npu.assert_status_success(status) + if attr == "SAI_LAG_MEMBER_ATTR_LAG_ID": + assert data.value() == TestLag.oid + elif attr == "SAI_LAG_MEMBER_ATTR_PORT_ID": + assert data.value() == npu.port_oids[0] + elif attr in ["SAI_LAG_MEMBER_ATTR_EGRESS_DISABLE", "SAI_LAG_MEMBER_ATTR_INGRESS_DISABLE"]: + assert data.value() == "false" + + + @pytest.mark.dependency(depends=['TestLag::test_create_members']) + def test_check_members(self, npu): + status, data = npu.get(TestLag.oid, ["SAI_LAG_ATTR_PORT_LIST"], False) + npu.assert_status_success(status) + mbr_oids = data.oids() + assert len(mbr_oids) == 
TestLag.lag_mbr_num + for oid in mbr_oids: + assert oid in TestLag.lag_mbr_oids + + @pytest.mark.dependency(depends=['TestLag::test_create_members']) + def test_remove_members(self, npu): + # Remove LAG members + for oid in TestLag.lag_mbr_oids: + npu.remove(oid) + + # Create bridge port for ports removed from LAG + for idx in range(TestLag.lag_mbr_num): + bp_oid = npu.create(SaiObjType.BRIDGE_PORT, + [ + "SAI_BRIDGE_PORT_ATTR_TYPE", "SAI_BRIDGE_PORT_TYPE_PORT", + "SAI_BRIDGE_PORT_ATTR_PORT_ID", npu.port_oids[idx], + #"SAI_BRIDGE_PORT_ATTR_BRIDGE_ID", npu.dot1q_br_oid, + "SAI_BRIDGE_PORT_ATTR_ADMIN_STATE", "true" + ]) + npu.dot1q_bp_oids[idx] = bp_oid + + # Add ports to default VLAN + for oid in npu.dot1q_bp_oids[0:TestLag.lag_mbr_num]: + npu.create_vlan_member(npu.default_vlan_oid, oid, "SAI_VLAN_TAGGING_MODE_UNTAGGED") + + # Set PVID + for oid in npu.port_oids[0:TestLag.lag_mbr_num]: + npu.set(oid, ["SAI_PORT_ATTR_PORT_VLAN_ID", npu.default_vlan_id]) + + @pytest.mark.dependency(depends=['TestLag::test_remove_members']) + def test_check_no_members(self, npu): + status, data = npu.get(TestLag.oid, ["SAI_LAG_ATTR_PORT_LIST"], False) + npu.assert_status_success(status) + assert len(data.oids()) == 0 + + @pytest.mark.dependency(depends=['TestLag::test_create']) + def test_remove(self, npu): + npu.remove(TestLag.oid) + From e864c3f1cae2f8db96d96d7f82479415301b6e3d Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Wed, 20 Sep 2023 11:35:20 +0300 Subject: [PATCH 12/19] Added FDB flush UTs (#201) Signed-off-by: Andriy Kokhan --- tests/ut/test_fdb_ut.py | 73 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/tests/ut/test_fdb_ut.py b/tests/ut/test_fdb_ut.py index 46b4ecd3..af9a4305 100644 --- a/tests/ut/test_fdb_ut.py +++ b/tests/ut/test_fdb_ut.py @@ -89,3 +89,76 @@ def test_duplicated_remove(self, npu): status = npu.remove_fdb(npu.default_vlan_oid, TestFdbEntry.mac, do_assert=False) assert status == "SAI_STATUS_ITEM_NOT_FOUND" + 
+class TestFlushFdbEntries: + dynamic_entries = ["00:00:00:00:00:11", "00:00:00:00:00:22"] + static_entries = ["00:00:00:00:00:33", "00:00:00:00:00:44"] + + def flush_fdb_entries(self, npu, entries_type): + for mac in TestFlushFdbEntries.dynamic_entries: + npu.create_fdb(npu.default_vlan_oid, mac, npu.dot1q_bp_oids[0], f"SAI_FDB_ENTRY_TYPE_DYNAMIC") + + for mac in TestFlushFdbEntries.static_entries: + npu.create_fdb(npu.default_vlan_oid, mac, npu.dot1q_bp_oids[0], f"SAI_FDB_ENTRY_TYPE_STATIC") + + npu.flush_fdb_entries(npu.switch_oid, [ + "SAI_FDB_FLUSH_ATTR_BV_ID", npu.default_vlan_oid, + "SAI_FDB_FLUSH_ATTR_ENTRY_TYPE", f"SAI_FDB_FLUSH_ENTRY_TYPE_{entries_type}" + ]) + + def test_flush_dynamic(self, npu): + self.flush_fdb_entries(npu, "DYNAMIC") + + flushed = [] + not_flushed = [] + + for mac in TestFlushFdbEntries.dynamic_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed.append(mac) + for mac in TestFlushFdbEntries.static_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status != "SAI_STATUS_SUCCESS": + flushed.append(mac) + + msg = "" if len(not_flushed) == 0 else f"Dynamic FDB entries {not_flushed} have not been flushed. " + msg += "" if len(flushed) == 0 else f"Static FDB entries {flushed} have been flushed." + assert not msg, msg + + def test_flush_static(self, npu): + self.flush_fdb_entries(npu, "STATIC") + + flushed = [] + not_flushed = [] + + for mac in TestFlushFdbEntries.dynamic_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status != "SAI_STATUS_SUCCESS": + flushed.append(mac) + for mac in TestFlushFdbEntries.static_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed.append(mac) + + msg = "" if len(not_flushed) == 0 else f"Static FDB entries {not_flushed} have not been flushed. 
" + msg += "" if len(flushed) == 0 else f"Dynamic FDB entries {flushed} have been flushed." + assert not msg, msg + + def test_flush_all(self, npu): + self.flush_fdb_entries(npu, "ALL") + + not_flushed_dynamic = [] + not_flushed_static = [] + + for mac in TestFlushFdbEntries.dynamic_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed_dynamic.append(mac) + for mac in TestFlushFdbEntries.static_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed_static.append(mac) + + msg = "" if len(not_flushed_static) == 0 else f"Static FDB entries {not_flushed_static} have not been flushed. " + msg += "" if len(not_flushed_dynamic) == 0 else f"Dynamic FDB entries {not_flushed_dynamic} have not been flushed." + assert not msg, msg From c2d359d93092b44d967a2440af7ff2ed22f0495f Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Sat, 30 Sep 2023 12:02:23 +0300 Subject: [PATCH 13/19] [CI/CD] Fixed checks what Docker images have to be rebuild (#206) Signed-off-by: Andriy Kokhan --- .github/workflows/sc-client-server-deb10.yml | 43 +++++++++++--------- .github/workflows/sc-client-server-deb11.yml | 39 ++++++++++-------- .github/workflows/sc-standalone-deb10.yml | 4 +- .github/workflows/sc-standalone-deb11.yml | 4 +- 4 files changed, 50 insertions(+), 40 deletions(-) diff --git a/.github/workflows/sc-client-server-deb10.yml b/.github/workflows/sc-client-server-deb10.yml index 53626639..de4ca19f 100644 --- a/.github/workflows/sc-client-server-deb10.yml +++ b/.github/workflows/sc-client-server-deb10.yml @@ -7,7 +7,6 @@ on: paths: - '.github/workflows/sc-client-server-deb10.yml' - 'dockerfiles/buster/Dockerfile.client' - - 'dockerfiles/buster/Dockerfile.saithrift-client' - 'dockerfiles/buster/Dockerfile.server' - 'npu/broadcom/BCM56850/saivs/Dockerfile.server' - 'common/**' @@ -23,11 +22,12 @@ on: - 'sai.env' env: - DOCKER_BASE: 
'dockerfiles/buster/Dockerfile' - DOCKER_REDIS: 'npu/broadcom/BCM56850/saivs/Dockerfile' - DOCKER_THRIFT: 'npu/broadcom/BCM56850/saivs/Dockerfile.saithrift' - REDIS_RPC: 0 - THRIFT_RPC: 0 + DOCKER_CLIENT: 'dockerfiles/buster/Dockerfile.client' + DOCKER_SERVER_BASE: 'dockerfiles/buster/Dockerfile.server' + DOCKER_SERVER: 'npu/broadcom/BCM56850/saivs/Dockerfile.server' + REDIS_CLIENT: 0 + REDIS_SERVER: 0 + jobs: build-sc-server: @@ -43,23 +43,25 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do + for file in "$DOCKER_SERVER_BASE" "$DOCKER_SERVER" "sai.env"; do if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then - echo "REDIS_RPC=1" >> $GITHUB_ENV + echo "REDIS_SERVER=1" >> $GITHUB_ENV fi done - name: Build server Docker image run: ./build.sh -i server -o deb10 - if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_SERVER == '1' }} - name: Pull SAI-C server run: ./run.sh -i server -o deb10 - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Save server Docker image run: docker save sc-server-trident2-saivs > sc-server.tar @@ -82,24 +84,25 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - changed_files=$(git diff --name-only origin/HEAD | xargs) - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do - if [[ "$changed_files" == *"$file"* ]]; then - echo "REDIS_RPC=1" + for file 
in "$DOCKER_CLIENT" "sai.env"; do + if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then + echo "REDIS_CLIENT=1" >> $GITHUB_ENV fi done - name: Build client Docker image run: ./build.sh -i client -o deb10 --nosnappi - if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_CLIENT == '1' }} - name: Pull SAI-C client run: ./run.sh -i client -o deb10 - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Save client Docker image run: docker save sc-client > sc-client.tar @@ -147,10 +150,10 @@ jobs: run: ./run.sh -i server -o deb10 - name: Update SAI-C server package run: ./exec.sh -i server --no-tty pip3 install /sai-challenger/common /sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Update SAI-C client package run: ./exec.sh -i client --no-tty pip3 install /sai-challenger/common /sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Create veth links between client and server dockers run: sudo ./veth-create-host.sh sc-server-trident2-saivs-run sc-client-run diff --git a/.github/workflows/sc-client-server-deb11.yml b/.github/workflows/sc-client-server-deb11.yml index 1a332b11..b6639c5b 100644 --- a/.github/workflows/sc-client-server-deb11.yml +++ b/.github/workflows/sc-client-server-deb11.yml @@ -7,7 +7,6 @@ on: paths: - '.github/workflows/sc-client-server-deb11.yml' - 'dockerfiles/bullseye/Dockerfile.client' - - 'dockerfiles/bullseye/Dockerfile.saithrift-client' - 'dockerfiles/bullseye/Dockerfile.server' - 'npu/broadcom/BCM56850/saivs/Dockerfile.server' - 'common/**' @@ -23,11 +22,11 @@ on: - 'sai.env' env: - DOCKER_BASE: 'dockerfiles/bullseye/Dockerfile' - DOCKER_REDIS: 'npu/broadcom/BCM56850/saivs/Dockerfile' - DOCKER_THRIFT: 'npu/broadcom/BCM56850/saivs/Dockerfile.saithrift' - REDIS_RPC: 0 - THRIFT_RPC: 0 + DOCKER_CLIENT: 'dockerfiles/bullseye/Dockerfile.client' + DOCKER_SERVER_BASE: 'dockerfiles/bullseye/Dockerfile.server' + 
DOCKER_SERVER: 'npu/broadcom/BCM56850/saivs/Dockerfile.server' + REDIS_CLIENT: 0 + REDIS_SERVER: 0 jobs: build-sc-server: @@ -43,23 +42,25 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do + for file in "$DOCKER_SERVER_BASE" "$DOCKER_SERVER" "sai.env"; do if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then - echo "REDIS_RPC=1" >> $GITHUB_ENV + echo "REDIS_SERVER=1" >> $GITHUB_ENV fi done - name: Build server Docker image run: ./build.sh -i server -o deb11 - if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_SERVER == '1' }} - name: Pull SAI-C server run: ./run.sh -i server -o deb11 - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Save server Docker image run: docker save sc-server-trident2-saivs > sc-server.tar @@ -82,23 +83,25 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do + for file in "$DOCKER_CLIENT" "sai.env"; do if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then - echo "REDIS_RPC=1" >> $GITHUB_ENV + echo "REDIS_CLIENT=1" >> $GITHUB_ENV fi done - name: Build client Docker image run: ./build.sh -i client -o deb11 --nosnappi - if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_CLIENT == '1' }} - name: Pull SAI-C client run: ./run.sh -i client -o deb11 - if: ${{ env.REDIS_RPC == '0' 
}} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Save client Docker image run: docker save sc-client > sc-client.tar @@ -146,10 +149,10 @@ jobs: run: ./run.sh -i server -o deb11 - name: Update SAI-C server package run: ./exec.sh -i server --no-tty pip3 install /sai-challenger/common /sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Update SAI-C client package run: ./exec.sh -i client --no-tty pip3 install /sai-challenger/common /sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Create veth links between client and server dockers run: sudo ./veth-create-host.sh sc-server-trident2-saivs-run sc-client-run diff --git a/.github/workflows/sc-standalone-deb10.yml b/.github/workflows/sc-standalone-deb10.yml index 57688b62..d88caef9 100644 --- a/.github/workflows/sc-standalone-deb10.yml +++ b/.github/workflows/sc-standalone-deb10.yml @@ -42,7 +42,9 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | diff --git a/.github/workflows/sc-standalone-deb11.yml b/.github/workflows/sc-standalone-deb11.yml index f237ae68..094e5360 100644 --- a/.github/workflows/sc-standalone-deb11.yml +++ b/.github/workflows/sc-standalone-deb11.yml @@ -42,7 +42,9 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | From 233a8960f3341b8dd6783b42587f4c972fcd2d11 Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Sat, 30 Sep 2023 13:47:05 +0300 
Subject: [PATCH 14/19] Updated to SAI v1.13+ (#205) * Added nlohmann-json3-dev as new sairedis dependency * Extended list of unsupported types in get_by_type() --------- Signed-off-by: Andriy Kokhan --- common/sai.py | 4 +++- dockerfiles/bullseye/Dockerfile.server | 3 ++- dockerfiles/buster/Dockerfile.server | 3 ++- sai.env | 14 +++++++------- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/common/sai.py b/common/sai.py index 3b7fd28b..d83eb1a8 100644 --- a/common/sai.py +++ b/common/sai.py @@ -338,7 +338,9 @@ def get_by_type(self, obj, attr, attr_type, do_assert=False): unsupported_types = [ "sai_port_eye_values_list_t", "sai_prbs_rx_state_t", "sai_port_err_status_list_t", "sai_fabric_port_reachability_t", - "sai_port_lane_latch_status_list_t", "sai_latch_status_t" + "sai_port_lane_latch_status_list_t", "sai_latch_status_t", + "sai_port_frequency_offset_ppm_list_t", "sai_port_snr_list_t", + "sai_acl_chain_list_t" ] if attr_type == "sai_object_list_t": status, data = self.get(obj, [attr, "1:oid:0x0"], do_assert) diff --git a/dockerfiles/bullseye/Dockerfile.server b/dockerfiles/bullseye/Dockerfile.server index 7e8d732d..60a4ef0a 100644 --- a/dockerfiles/bullseye/Dockerfile.server +++ b/dockerfiles/bullseye/Dockerfile.server @@ -34,7 +34,8 @@ RUN apt-get install -y \ make libtool m4 autoconf dh-exec debhelper automake cmake pkg-config \ libhiredis-dev libnl-3-dev libnl-genl-3-dev libnl-route-3-dev swig \ libgtest-dev libgmock-dev libboost-dev autoconf-archive \ - uuid-dev libboost-serialization-dev libyang-dev libyang1 + uuid-dev libboost-serialization-dev libyang-dev libyang1 \ + nlohmann-json3-dev RUN apt-get install -y \ libnl-3-dev libnl-genl-3-dev libnl-route-3-dev libnl-nf-3-dev libzmq3-dev diff --git a/dockerfiles/buster/Dockerfile.server b/dockerfiles/buster/Dockerfile.server index d1e8c69b..371cff6d 100644 --- a/dockerfiles/buster/Dockerfile.server +++ b/dockerfiles/buster/Dockerfile.server @@ -40,7 +40,8 @@ RUN apt-get install -y \ make 
libtool m4 autoconf dh-exec debhelper automake cmake pkg-config \ libhiredis-dev libnl-3-dev libnl-genl-3-dev libnl-route-3-dev swig3.0 \ libgtest-dev libgmock-dev libboost-dev autoconf-archive \ - uuid-dev libboost-serialization-dev libyang-dev libyang0.16 + uuid-dev libboost-serialization-dev libyang-dev libyang0.16 \ + nlohmann-json3-dev RUN apt-get install -y \ libnl-3-dev libnl-genl-3-dev libnl-route-3-dev libnl-nf-3-dev libzmq3-dev diff --git a/sai.env b/sai.env index fa38aa3b..ba81acd2 100644 --- a/sai.env +++ b/sai.env @@ -1,13 +1,13 @@ # The sonic-swss-common and sonic-sairedis commits were taken from -# sonic-buildimage master as of Aug 15, 2023 +# sonic-buildimage master as of Sep 29, 2023 # -# https://github.com/sonic-net/sonic-buildimage/tree/4acaaf8 +# https://github.com/sonic-net/sonic-buildimage/tree/81a2f56 -SWSS_COMMON_ID=be425ed -SAIREDIS_ID=eb24302 +SWSS_COMMON_ID=b0f148e +SAIREDIS_ID=c22b76b # SAI version: -# Branch v1.12 -# May 17, 2023 +# Branch master (post v1.13) +# Sep 8, 2023 -SAI_ID=92875f9 +SAI_ID=7f7a758 From dcde9c05005995d425394b43c49ffb72d87e655b Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Sun, 1 Oct 2023 13:13:16 +0300 Subject: [PATCH 15/19] Fixed pytest warnings (#207) Signed-off-by: Andriy Kokhan --- common/sai_dut.py | 2 +- tests/pytest.ini | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 tests/pytest.ini diff --git a/common/sai_dut.py b/common/sai_dut.py index d3f27224..95820a60 100644 --- a/common/sai_dut.py +++ b/common/sai_dut.py @@ -137,7 +137,7 @@ def init(self): # Write to CONFIG_DB SONiC device information needed on syncd start config_db = redis.Redis(host=self.server_ip, port=self.port, db=4) - config_db.hmset("DEVICE_METADATA|localhost", device_metadata) + config_db.hset("DEVICE_METADATA|localhost", mapping=device_metadata) config_db.set("CONFIG_DB_INITIALIZED", "1") def deinit(self): diff --git a/tests/pytest.ini b/tests/pytest.ini new file mode 100644 index 00000000..5fc853f8 --- 
/dev/null +++ b/tests/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +markers = + dpu: mark DPU specific tests \ No newline at end of file From a9942029411e7be91411d573014645235a70f531 Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Sun, 1 Oct 2023 15:21:00 +0300 Subject: [PATCH 16/19] Refactored Redis RPC I/O failure processing (#208) Signed-off-by: Andriy Kokhan --- .../sai_redis_client/sai_redis_client.py | 49 ++++++++++--------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/common/sai_client/sai_redis_client/sai_redis_client.py b/common/sai_client/sai_redis_client/sai_redis_client.py index ebfbb795..2782302e 100644 --- a/common/sai_client/sai_redis_client/sai_redis_client.py +++ b/common/sai_client/sai_redis_client/sai_redis_client.py @@ -86,16 +86,10 @@ def operate(self, obj, attrs, op): if self.asic_channel is None: self.__assert_syncd_running() + # Clean-up Redis RPC I/O pipe self.r.delete("GETRESPONSE_KEY_VALUE_OP_QUEUE") - - tout = 0.01 - attempts = self.attempts - while len(self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1)) > 0 and attempts > 0: - time.sleep(0.01) - attempts -= 1 - - if attempts == 0: - return [] + status = self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1) + assert len(status) == 0, "Redis RPC I/O failure!" # Remove spaces from the key string. # Required by sai_deserialize_route_entry() in sonic-sairedis. @@ -107,22 +101,25 @@ def operate(self, obj, attrs, op): self.r.lpush("ASIC_STATE_KEY_VALUE_OP_QUEUE", obj, attrs, op) self.r.publish(self.asic_channel, "G") - status = [] - attempts = self.attempts - - # Wait upto 3 mins for switch init if obj.startswith("SAI_OBJECT_TYPE_SWITCH") and op == "Screate": + # Wait upto 3 mins for switch init tout = 0.5 attempts = 240 + else: + tout = 0.01 + attempts = self.attempts + # Get response + status = self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1) while len(status) < 3 and attempts > 0: + assert self.__check_syncd_running(), "FATAL - SyncD has exited or crashed!" 
time.sleep(tout) attempts -= 1 status = self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1) self.r.delete("GETRESPONSE_KEY_VALUE_OP_QUEUE") - assert len(status) == 3, "SAI \"{}\" operation failure!".format(op) + assert len(status) == 3, f"SAI \"{op[1:]}\" operation failure!" return status def create(self, obj, attrs, do_assert=True): @@ -543,18 +540,22 @@ def vid_to_rid(self, vid): assert rid.startswith("oid:"), f"Invalid RID format {vid}" return rid + def __check_syncd_running(self): + if self.asic_db == 1: + numsub = self.r.execute_command('PUBSUB', 'NUMSUB', 'ASIC_STATE_CHANNEL') + if numsub[1] >= 1: + # SONiC 202111 or older detected + return "ASIC_STATE_CHANNEL" + numsub = self.r.execute_command('PUBSUB', 'NUMSUB', f'ASIC_STATE_CHANNEL@{self.asic_db}') + if numsub[1] >= 1: + # SONiC 202205 or newer detected + return f"ASIC_STATE_CHANNEL@{self.asic_db}" + return None + def __assert_syncd_running(self, tout=30): for i in range(tout + 1): - if self.asic_db == 1: - numsub = self.r.execute_command('PUBSUB', 'NUMSUB', 'ASIC_STATE_CHANNEL') - if numsub[1] >= 1: - # SONiC 202111 or older detected - self.asic_channel = "ASIC_STATE_CHANNEL" - return - numsub = self.r.execute_command('PUBSUB', 'NUMSUB', f'ASIC_STATE_CHANNEL@{self.asic_db}') - if numsub[1] >= 1: - # SONiC 202205 or newer detected - self.asic_channel = f"ASIC_STATE_CHANNEL@{self.asic_db}" + self.asic_channel = self.__check_syncd_running() + if self.asic_channel: return if i < tout: time.sleep(1) From 424897e208f73953ecac64162235de6044b73ca2 Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Tue, 3 Oct 2023 11:50:11 +0300 Subject: [PATCH 17/19] Removed obsoleted exec option. 
Please use "--testbed" instead (#209) Signed-off-by: Andriy Kokhan --- tests/conftest.py | 122 ++++++++++------------------------------------ 1 file changed, 25 insertions(+), 97 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 05ee7384..d5cb609c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,13 +49,8 @@ def prev_test_failed(): def pytest_addoption(parser): - parser.addoption("--sai-server", action="store", default='localhost', help="SAI server IP") - parser.addoption("--traffic", action="store_true", default=False, help="run tests with traffic") - parser.addoption("--loglevel", action="store", default='NOTICE', help="syncd logging level") - parser.addoption("--asic", action="store", default=os.getenv('SC_ASIC'), help="ASIC type") - parser.addoption("--target", action="store", default=os.getenv('SC_TARGET'), help="The target device with this NPU") - parser.addoption("--sku", action="store", default=None, help="SKU mode") - parser.addoption("--testbed", action="store", default=None, help="Testbed name") + parser.addoption("--traffic", action="store_false", help="run tests with traffic") + parser.addoption("--testbed", action="store", help="Testbed name", required=True) def pytest_sessionstart(session): @@ -69,119 +64,52 @@ def exec_params(request): # Generic parameters "traffic": request.config.getoption("--traffic"), "testbed": request.config.getoption("--testbed"), - # DUT specific parameters - "alias": "dut", - "asic": request.config.getoption("--asic"), - "target": request.config.getoption("--target"), - "sku": request.config.getoption("--sku"), - "client": { - "type": "redis", - "config": { - "ip": request.config.getoption("--sai-server"), - "port": "6379", - "loglevel": request.config.getoption("--loglevel") - } - } } return config_param @pytest.fixture(scope="session") def testbed_instance(exec_params): - testbed_name = exec_params.get("testbed", None) - if testbed_name is None: - yield None - else: - testbed = 
SaiTestbed(f"{curdir}/..", testbed_name, exec_params["traffic"]) - testbed.init() - yield testbed - testbed.deinit() + testbed = SaiTestbed(f"{curdir}/..", exec_params["testbed"], exec_params["traffic"]) + testbed.init() + yield testbed + testbed.deinit() @pytest.fixture(scope="function") def testbed(testbed_instance): - if testbed_instance: - testbed_instance.setup() - yield testbed_instance - testbed_instance.teardown() - else: - yield None + testbed_instance.setup() + yield testbed_instance + testbed_instance.teardown() @pytest.fixture(scope="session") -def npu(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.npu) == 1: - return testbed_instance.npu[0] - return None - - npu = None - exec_params["asic_dir"] = None - - if exec_params["asic"] == "generic": - npu = SaiNpu(exec_params) - else: - npu = SaiTestbed.spawn_asic(f"{curdir}/..", exec_params, "npu") - - if npu is not None: - npu.reset() - return npu +def npu(testbed_instance): + if len(testbed_instance.npu) == 1: + return testbed_instance.npu[0] + return None @pytest.fixture(scope="session") -def dpu(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.dpu) == 1: - return testbed_instance.dpu[0] - return None +def dpu(testbed_instance): + if len(testbed_instance.dpu) == 1: + return testbed_instance.dpu[0] + return None - dpu = None - exec_params["asic_dir"] = None - - if exec_params["asic"] == "generic": - dpu = SaiDpu(exec_params) - else: - dpu = SaiTestbed.spawn_asic(f"{curdir}/..", exec_params, "dpu") - - if dpu is not None: - dpu.reset() - return dpu @pytest.fixture(scope="session") -def phy(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.phy) == 1: - return testbed_instance.phy[0] - return None +def phy(testbed_instance): + if len(testbed_instance.phy) == 1: + return testbed_instance.phy[0] + return None - phy = None - exec_params["asic_dir"] = None - - if exec_params["asic"] 
== "generic": - phy = SaiPhy(exec_params) - else: - phy = SaiTestbed.spawn_asic(f"{curdir}/..", exec_params, "phy") - - if phy is not None: - phy.reset() - return phy @pytest.fixture(scope="session") -def dataplane_instance(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.dataplane) == 1: - yield testbed_instance.dataplane[0] - else: - yield None +def dataplane_instance(testbed_instance): + if len(testbed_instance.dataplane) == 1: + yield testbed_instance.dataplane[0] else: - cfg = { - "type": "ptf", - "traffic": exec_params["traffic"] - } - dp = SaiTestbed.spawn_dataplane(cfg) - dp.init() - yield dp - dp.deinit() + yield None @pytest.fixture(scope="function") From 1f0699bd096665fe5c4d68278b4c70d3784c6f34 Mon Sep 17 00:00:00 2001 From: Yurii Lisovskyi Date: Tue, 3 Oct 2023 14:02:16 +0300 Subject: [PATCH 18/19] Add generic SAI Thrift server (#203) Signed-off-by: Yurii Lisovskyi --- .github/workflows/sc-client-server-deb10.yml | 11 +++ .github/workflows/sc-client-server-deb11.yml | 11 +++ build.sh | 13 ++- configs/server/supervisord.conf.thrift | 32 ++++++++ .../bullseye/Dockerfile.saithrift-server | 79 +++++++++++++++++++ .../buster/Dockerfile.saithrift-server | 73 +++++++++++++++++ .../saivs/Dockerfile.saithrift-server | 10 +++ run.sh | 2 +- 8 files changed, 228 insertions(+), 3 deletions(-) create mode 100644 configs/server/supervisord.conf.thrift create mode 100644 dockerfiles/bullseye/Dockerfile.saithrift-server create mode 100644 dockerfiles/buster/Dockerfile.saithrift-server create mode 100644 npu/broadcom/BCM56850/saivs/Dockerfile.saithrift-server diff --git a/.github/workflows/sc-client-server-deb10.yml b/.github/workflows/sc-client-server-deb10.yml index de4ca19f..7e3b4b73 100644 --- a/.github/workflows/sc-client-server-deb10.yml +++ b/.github/workflows/sc-client-server-deb10.yml @@ -25,8 +25,10 @@ env: DOCKER_CLIENT: 'dockerfiles/buster/Dockerfile.client' DOCKER_SERVER_BASE: 'dockerfiles/buster/Dockerfile.server' 
DOCKER_SERVER: 'npu/broadcom/BCM56850/saivs/Dockerfile.server'
+  DOCKER_THRIFT_SERVER: 'dockerfiles/buster/Dockerfile.saithrift-server'
   REDIS_CLIENT: 0
   REDIS_SERVER: 0
+  THRIFT_SERVER: 0

jobs:

@@ -54,6 +56,11 @@
           echo "REDIS_SERVER=1" >> $GITHUB_ENV
         fi
       done
+      for file in "$DOCKER_THRIFT_SERVER" "sai.env"; do
+        if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then
+          echo "THRIFT_SERVER=1" >> $GITHUB_ENV
+        fi
+      done

     - name: Build server Docker image
       run: ./build.sh -i server -o deb10
@@ -70,6 +77,10 @@
       with:
         name: Server Image
         path: sc-server.tar
+
+    - name: Build Thrift server Docker image
+      run: ./build.sh -i server -o deb10 -s thrift
+      if: ${{ env.THRIFT_SERVER == '1' }}

   build-sc-client:
     name: Build SAI Challenger client image
diff --git a/.github/workflows/sc-client-server-deb11.yml b/.github/workflows/sc-client-server-deb11.yml
index b6639c5b..04d1332d 100644
--- a/.github/workflows/sc-client-server-deb11.yml
+++ b/.github/workflows/sc-client-server-deb11.yml
@@ -25,8 +25,10 @@ env:
   DOCKER_CLIENT: 'dockerfiles/bullseye/Dockerfile.client'
   DOCKER_SERVER_BASE: 'dockerfiles/bullseye/Dockerfile.server'
   DOCKER_SERVER: 'npu/broadcom/BCM56850/saivs/Dockerfile.server'
+  DOCKER_THRIFT_SERVER: 'dockerfiles/bullseye/Dockerfile.saithrift-server'
   REDIS_CLIENT: 0
   REDIS_SERVER: 0
+  THRIFT_SERVER: 0

jobs:
  build-sc-server:
@@ -53,6 +55,11 @@
           echo "REDIS_SERVER=1" >> $GITHUB_ENV
         fi
       done
+      for file in "$DOCKER_THRIFT_SERVER" "sai.env"; do
+        if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then
+          echo "THRIFT_SERVER=1" >> $GITHUB_ENV
+        fi
+      done

     - name: Build server Docker image
       run: ./build.sh -i server -o deb11
@@ -70,6 +77,10 @@
       name: Server Image
       path: sc-server.tar

+    - name: Build Thrift server Docker image
+      run: ./build.sh -i server -o deb11 -s thrift
+      if: ${{ env.THRIFT_SERVER == '1' }}
+
 build-sc-client:
   name: Build SAI Challenger client image
   runs-on: ubuntu-20.04
diff --git a/build.sh b/build.sh
index
4ac00093..ddfddbf8 100755 --- a/build.sh +++ b/build.sh @@ -149,13 +149,18 @@ if [ "${IMAGE_TYPE}" = "standalone" ]; then elif [ "${IMAGE_TYPE}" = "server" ]; then find ${ASIC_PATH}/../ -type f -name \*.py -exec install -D {} .build/{} \; find ${ASIC_PATH}/../ -type f -name \*.json -exec install -D {} .build/{} \; - docker build -f dockerfiles/${BASE_OS}/Dockerfile.server -t sc-server-base:${BASE_OS} . + if [ "${SAI_INTERFACE}" = "thrift" ]; then + docker build -f dockerfiles/${BASE_OS}/Dockerfile.saithrift-server -t sc-thrift-server-base:${BASE_OS} . + else + docker build -f dockerfiles/${BASE_OS}/Dockerfile.server -t sc-server-base:${BASE_OS} . + fi rm -rf .build/ else docker build -f dockerfiles/${BASE_OS}/Dockerfile.client --build-arg NOSNAPPI=${NOSNAPPI} -t sc-client:${BASE_OS} . if [ "${SAI_INTERFACE}" = "thrift" ]; then docker build -f dockerfiles/${BASE_OS}/Dockerfile.saithrift-client -t sc-thrift-client:${BASE_OS} . fi + exit 0 fi # Build target Docker image @@ -168,6 +173,10 @@ if [ "${IMAGE_TYPE}" = "standalone" ]; then docker build -f Dockerfile --build-arg BASE_OS=${BASE_OS} -t sc-${IMG_NAME}:${BASE_OS} . fi elif [ "${IMAGE_TYPE}" = "server" ]; then - docker build -f Dockerfile.server --build-arg BASE_OS=${BASE_OS} -t sc-server-${IMG_NAME}:${BASE_OS} . + if [ "${SAI_INTERFACE}" = "thrift" ]; then + docker build -f Dockerfile.saithrift-server --build-arg BASE_OS=${BASE_OS} -t sc-thrift-server-${IMG_NAME}:${BASE_OS} . + else + docker build -f Dockerfile.server --build-arg BASE_OS=${BASE_OS} -t sc-server-${IMG_NAME}:${BASE_OS} . 
+    fi
 fi

 popd
diff --git a/configs/server/supervisord.conf.thrift b/configs/server/supervisord.conf.thrift
new file mode 100644
index 00000000..3cd2a363
--- /dev/null
+++ b/configs/server/supervisord.conf.thrift
@@ -0,0 +1,32 @@
+[supervisord]
+logfile_maxbytes=1MB
+logfile_backups=2
+nodaemon=true
+
+[eventlistener:dependent-startup]
+command=python3 -m supervisord_dependent_startup
+autostart=true
+autorestart=unexpected
+startretries=0
+exitcodes=0,3
+events=PROCESS_STATE
+buffer_size=1024
+
+[program:rsyslogd]
+command=/usr/sbin/rsyslogd -n -iNONE
+priority=1
+autostart=false
+autorestart=false
+stdout_logfile=syslog
+stderr_logfile=syslog
+dependent_startup=true
+
+[program:saiserver]
+command=/usr/sbin/saiserver -f /usr/share/sonic/hwsku/port_config.ini -p /usr/share/sonic/hwsku/sai.profile
+priority=3
+autostart=true
+autorestart=true
+stdout_logfile=syslog
+stderr_logfile=syslog
+dependent_startup_wait_for=rsyslogd:running
+
diff --git a/dockerfiles/bullseye/Dockerfile.saithrift-server b/dockerfiles/bullseye/Dockerfile.saithrift-server
new file mode 100644
index 00000000..83bfe034
--- /dev/null
+++ b/dockerfiles/bullseye/Dockerfile.saithrift-server
@@ -0,0 +1,79 @@
+FROM debian:bullseye-slim
+
+## Make apt-get non-interactive
+ENV DEBIAN_FRONTEND=noninteractive
+
+COPY sai.env /
+
+# Install generic packages
+RUN apt-get -o Acquire::Check-Valid-Until=false update && apt-get install -y \
+    apt-utils \
+    vim \
+    curl \
+    wget \
+    iproute2 \
+    unzip \
+    git \
+    procps \
+    build-essential \
+    graphviz \
+    doxygen \
+    aspell \
+    python3-pip \
+    rsyslog \
+    supervisor
+
+# Add support for supervisord to handle startup dependencies
+RUN pip3 install supervisord-dependent-startup==1.4.0
+
+# Install generic packages
+RUN apt-get install -y \
+    libtemplate-perl \
+    libconst-fast-perl \
+    libmoosex-aliases-perl \
+    libnamespace-autoclean-perl \
+    libgetopt-long-descriptive-perl \
+    aspell-en bison flex g++ \
+    libboost-all-dev libevent-dev libssl-dev \
+    libpython3-dev
libpcap-dev
+
+WORKDIR /sai
+
+RUN apt-get install -y pkg-config \
+    && wget "http://archive.apache.org/dist/thrift/0.11.0/thrift-0.11.0.tar.gz" \
+    && tar -xf thrift-0.11.0.tar.gz \
+    && cd thrift-0.11.0 \
+    && ./bootstrap.sh \
+    && ./configure --prefix=/usr --with-cpp --with-python \
+       --with-qt4=no --with-qt5=no --with-csharp=no --with-java=no --with-erlang=no \
+       --with-nodejs=no --with-lua=no --with-perl=no --with-php=no --with-dart=no \
+       --with-ruby=no --with-haskell=no --with-go=no --with-rs=no --with-haxe=no \
+       --with-dotnetcore=no --with-d=no \
+    && make && make install \
+    && pip3 install ctypesgen lib/py \
+    && cd /sai \
+    && rm -rf thrift-0.11.0 thrift-0.11.0.tar.gz ;
+
+ENV SAITHRIFTV2=y
+ENV GEN_SAIRPC_OPTS="-cve"
+ENV SAIRPC_EXTRA_LIBS="-L/usr/local/lib/ -lpthread"
+
+RUN git clone https://github.com/opencomputeproject/SAI.git \
+    && cd SAI && git fetch origin \
+    && . /sai.env \
+    && git checkout ${SAI_ID} \
+    && cd meta \
+    && make all libsaimetadata.so libsai.so \
+    && cp libsaimetadata.so /usr/lib \
+    && cp libsai.so /usr/lib \
+    && cd ..
\ + && mkdir /usr/include/sai/ \ + && cp ./inc/sai*.h /usr/include/sai/ \ + && cp ./experimental/sai*.h /usr/include/sai/ \ + && make saithrift-install + +WORKDIR /sai-challenger + +COPY configs/server/supervisord.conf.thrift /etc/supervisor/conf.d/supervisord.conf + +CMD ["/usr/bin/supervisord"] diff --git a/dockerfiles/buster/Dockerfile.saithrift-server b/dockerfiles/buster/Dockerfile.saithrift-server new file mode 100644 index 00000000..51c94506 --- /dev/null +++ b/dockerfiles/buster/Dockerfile.saithrift-server @@ -0,0 +1,73 @@ +FROM debian:buster-slim + +RUN echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian/ buster main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian/ buster main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian buster-backports main" >> /etc/apt/sources.list + +## Make apt-get non-interactive +ENV DEBIAN_FRONTEND=noninteractive + +COPY sai.env / + +# Install generic packages +RUN apt-get -o Acquire::Check-Valid-Until=false update && apt-get install -y \ + apt-utils \ + vim \ + curl \ + wget \ + iproute2 \ + unzip \ + git \ + procps \ + build-essential \ + graphviz \ + doxygen \ + aspell \ + python3-pip \ + rsyslog \ + supervisor + +# Add support for supervisord to handle startup dependencies +RUN pip3 install supervisord-dependent-startup==1.4.0 + +# Install generic packages +RUN apt-get install -y \ + libtemplate-perl \ + libconst-fast-perl \ + libmoosex-aliases-perl \ + libnamespace-autoclean-perl \ + libgetopt-long-descriptive-perl \ + aspell-en bison flex g++ \ + 
libboost-all-dev libevent-dev libssl-dev \ + libpython3-dev libpcap-dev + +WORKDIR /sai + +RUN apt-get install -y thrift-compiler libthrift-dev libthrift-0.11.0 \ + && pip3 install ctypesgen thrift==0.11.0 ; + +ENV SAITHRIFTV2=y +ENV GEN_SAIRPC_OPTS="-cve" +ENV SAIRPC_EXTRA_LIBS="-L/usr/local/lib/ -lpthread" + +RUN git clone https://github.com/opencomputeproject/SAI.git \ + && cd SAI && git fetch origin \ + && . /sai.env \ + && git checkout ${SAI_ID} \ + && cd meta \ + && make all libsaimetadata.so libsai.so \ + && cp libsaimetadata.so /usr/lib \ + && cp libsai.so /usr/lib \ + && cd .. \ + && mkdir /usr/include/sai/ \ + && cp ./inc/sai*.h /usr/include/sai/ \ + && cp ./experimental/sai*.h /usr/include/sai/ \ + && make saithrift-install + +WORKDIR /sai-challenger + +COPY configs/server/supervisord.conf.thrift /etc/supervisor/conf.d/supervisord.conf + +CMD ["/usr/bin/supervisord"] diff --git a/npu/broadcom/BCM56850/saivs/Dockerfile.saithrift-server b/npu/broadcom/BCM56850/saivs/Dockerfile.saithrift-server new file mode 100644 index 00000000..cbbee09b --- /dev/null +++ b/npu/broadcom/BCM56850/saivs/Dockerfile.saithrift-server @@ -0,0 +1,10 @@ +ARG BASE_OS +FROM sc-thrift-server-base:$BASE_OS + +COPY configs/sai.profile /usr/share/sonic/hwsku/ +COPY configs/port_config.ini /usr/share/sonic/hwsku/ +COPY configs/lanemap.ini /usr/share/sonic/hwsku/ + +WORKDIR / + +CMD ["/usr/bin/supervisord"] \ No newline at end of file diff --git a/run.sh b/run.sh index 3d248752..64bb7101 100755 --- a/run.sh +++ b/run.sh @@ -186,7 +186,7 @@ if [ "${IMAGE_TYPE}" = "standalone" ]; then IMG_NAME=$(echo "${PREFIX}-${ASIC_TYPE}-${TARGET}" | tr '[:upper:]' '[:lower:]') OPTS="$OPTS -v $(pwd):/sai-challenger" elif [ "${IMAGE_TYPE}" = "server" ]; then - IMG_NAME=$(echo "sc-server-${ASIC_TYPE}-${TARGET}" | tr '[:upper:]' '[:lower:]') + IMG_NAME=$(echo "${PREFIX}-server-${ASIC_TYPE}-${TARGET}" | tr '[:upper:]' '[:lower:]') else IMG_NAME=${PREFIX}-client OPTS="$OPTS -v $(pwd):/sai-challenger" From 
9fbc878b0cd71b58e646c6ea62b5916fcc34695e Mon Sep 17 00:00:00 2001 From: Andriy Kokhan Date: Tue, 3 Oct 2023 17:37:42 +0300 Subject: [PATCH 19/19] Fixed "--traffic" option behavior (#210) Signed-off-by: Andriy Kokhan --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index d5cb609c..c7f5f483 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,7 +49,7 @@ def prev_test_failed(): def pytest_addoption(parser): - parser.addoption("--traffic", action="store_false", help="run tests with traffic") + parser.addoption("--traffic", action="store_true", help="run tests with traffic") parser.addoption("--testbed", action="store", help="Testbed name", required=True)