diff --git a/.github/workflows/sc-client-server-deb10.yml b/.github/workflows/sc-client-server-deb10.yml index 53bfa247..7e3b4b73 100644 --- a/.github/workflows/sc-client-server-deb10.yml +++ b/.github/workflows/sc-client-server-deb10.yml @@ -7,7 +7,6 @@ on: paths: - '.github/workflows/sc-client-server-deb10.yml' - 'dockerfiles/buster/Dockerfile.client' - - 'dockerfiles/buster/Dockerfile.saithrift-client' - 'dockerfiles/buster/Dockerfile.server' - 'npu/broadcom/BCM56850/saivs/Dockerfile.server' - 'common/**' @@ -23,11 +22,14 @@ on: - 'sai.env' env: - DOCKER_BASE: 'dockerfiles/buster/Dockerfile' - DOCKER_REDIS: 'npu/broadcom/BCM56850/saivs/Dockerfile' - DOCKER_THRIFT: 'npu/broadcom/BCM56850/saivs/Dockerfile.saithrift' - REDIS_RPC: 0 - THRIFT_RPC: 0 + DOCKER_CLIENT: 'dockerfiles/buster/Dockerfile.client' + DOCKER_SERVER_BASE: 'dockerfiles/buster/Dockerfile.server' + DOCKER_SERVER: 'npu/broadcom/BCM56850/saivs/Dockerfile.server' + DOCKER_THRIFT_SERVER: 'dockerfiles/buster/Dockerfile.saithrift-server' + REDIS_CLIENT: 0 + REDIS_SERVER: 0 + THRIFT_SERVER: 0 + jobs: build-sc-server: @@ -43,23 +45,30 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<<EOF' >> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do + for file in "$DOCKER_SERVER_BASE" "$DOCKER_SERVER" "sai.env"; do + if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then + echo "REDIS_SERVER=1" >> $GITHUB_ENV + fi + done + for file in "$DOCKER_THRIFT_SERVER" "sai.env"; do if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then - echo "REDIS_RPC=1" >> $GITHUB_ENV + echo "THRIFT_SERVER=1" >> $GITHUB_ENV fi done - name: Build server Docker image run: ./build.sh -i server -o deb10 - 
if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_SERVER == '1' }} - name: Pull SAI-C server run: ./run.sh -i server -o deb10 - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Save server Docker image run: docker save sc-server-trident2-saivs > sc-server.tar @@ -68,6 +77,10 @@ jobs: with: name: Server Image path: sc-server.tar + + - name: Build Thrift server Docker image + run: ./build.sh -i server -o deb10 -s thrift + if: ${{ env.THRIFT_SERVER == '1' }} build-sc-client: name: Build SAI Challenger client image @@ -82,24 +95,25 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<<EOF' >> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - changed_files=$(git diff --name-only origin/HEAD | xargs) - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do - if [[ "$changed_files" == *"$file"* ]]; then - echo "REDIS_RPC=1" + for file in "$DOCKER_CLIENT" "sai.env"; do + if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then + echo "REDIS_CLIENT=1" >> $GITHUB_ENV fi done - name: Build client Docker image run: ./build.sh -i client -o deb10 --nosnappi - if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_CLIENT == '1' }} - name: Pull SAI-C client run: ./run.sh -i client -o deb10 - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Save client Docker image run: docker save sc-client > sc-client.tar @@ -147,17 +161,19 @@ jobs: run: ./run.sh -i server -o deb10 - name: Update SAI-C server package run: ./exec.sh -i server --no-tty pip3 install /sai-challenger/common /sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Update SAI-C client package run: ./exec.sh -i client --no-tty pip3 install /sai-challenger/common 
/sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Create veth links between client and server dockers run: sudo ./veth-create-host.sh sc-server-trident2-saivs-run sc-client-run - name: Run functional test cases run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k "test_l2_basic" + - name: Run unit tests + run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run unit tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k \ - "test_acl_ut or test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + "test_switch_ut and not sai_map_list_t" - name: Run thift data-driven tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v test_l2_basic_dd.py diff --git a/.github/workflows/sc-client-server-deb11.yml b/.github/workflows/sc-client-server-deb11.yml index 389b3342..04d1332d 100644 --- a/.github/workflows/sc-client-server-deb11.yml +++ b/.github/workflows/sc-client-server-deb11.yml @@ -7,7 +7,6 @@ on: paths: - '.github/workflows/sc-client-server-deb11.yml' - 'dockerfiles/bullseye/Dockerfile.client' - - 'dockerfiles/bullseye/Dockerfile.saithrift-client' - 'dockerfiles/bullseye/Dockerfile.server' - 'npu/broadcom/BCM56850/saivs/Dockerfile.server' - 'common/**' @@ -23,11 +22,13 @@ on: - 'sai.env' env: - DOCKER_BASE: 'dockerfiles/bullseye/Dockerfile' - DOCKER_REDIS: 'npu/broadcom/BCM56850/saivs/Dockerfile' - DOCKER_THRIFT: 'npu/broadcom/BCM56850/saivs/Dockerfile.saithrift' - REDIS_RPC: 0 - THRIFT_RPC: 0 + DOCKER_CLIENT: 'dockerfiles/bullseye/Dockerfile.client' + DOCKER_SERVER_BASE: 'dockerfiles/bullseye/Dockerfile.server' + DOCKER_SERVER: 'npu/broadcom/BCM56850/saivs/Dockerfile.server' + DOCKER_THRIFT_SERVER: 'dockerfiles/buster/Dockerfile.saithrift-server' + REDIS_CLIENT: 0 + 
REDIS_SERVER: 0 + THRIFT_SERVER: 0 jobs: build-sc-server: @@ -43,23 +44,30 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<<EOF' >> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do + for file in "$DOCKER_SERVER_BASE" "$DOCKER_SERVER" "sai.env"; do if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then - echo "REDIS_RPC=1" >> $GITHUB_ENV + echo "REDIS_SERVER=1" >> $GITHUB_ENV + fi + done + for file in "$DOCKER_THRIFT_SERVER" "sai.env"; do + if [[ "${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then + echo "THRIFT_SERVER=1" >> $GITHUB_ENV fi done - name: Build server Docker image run: ./build.sh -i server -o deb11 - if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_SERVER == '1' }} - name: Pull SAI-C server run: ./run.sh -i server -o deb11 - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Save server Docker image run: docker save sc-server-trident2-saivs > sc-server.tar @@ -69,6 +77,10 @@ jobs: name: Server Image path: sc-server.tar + - name: Build Thrift server Docker image + run: ./build.sh -i server -o deb11 -s thrift + if: ${{ env.THRIFT_SERVER == '1' }} + build-sc-client: name: Build SAI Challenger client image runs-on: ubuntu-20.04 @@ -82,23 +94,25 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<<EOF' >> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | - for file in "$DOCKER_BASE" "$DOCKER_REDIS" "sai.env"; do + for file in "$DOCKER_CLIENT" "sai.env"; do if [[ 
"${{ steps.check_changes.outputs.changed_files }}" == *"$file"* ]]; then - echo "REDIS_RPC=1" >> $GITHUB_ENV + echo "REDIS_CLIENT=1" >> $GITHUB_ENV fi done - name: Build client Docker image run: ./build.sh -i client -o deb11 --nosnappi - if: ${{ env.REDIS_RPC == '1' }} + if: ${{ env.REDIS_CLIENT == '1' }} - name: Pull SAI-C client run: ./run.sh -i client -o deb11 - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Save client Docker image run: docker save sc-client > sc-client.tar @@ -146,17 +160,19 @@ jobs: run: ./run.sh -i server -o deb11 - name: Update SAI-C server package run: ./exec.sh -i server --no-tty pip3 install /sai-challenger/common /sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_SERVER == '0' }} - name: Update SAI-C client package run: ./exec.sh -i client --no-tty pip3 install /sai-challenger/common /sai-challenger - if: ${{ env.REDIS_RPC == '0' }} + if: ${{ env.REDIS_CLIENT == '0' }} - name: Create veth links between client and server dockers run: sudo ./veth-create-host.sh sc-server-trident2-saivs-run sc-client-run - name: Run functional test cases run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k "test_l2_basic" + - name: Run unit tests + run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py - name: Run unit tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v -k \ - "test_acl_ut or test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + "test_switch_ut and not sai_map_list_t" - name: Run thift data-driven tests run: ./exec.sh --no-tty -i client pytest --testbed=saivs_client_server -v test_l2_basic_dd.py diff --git a/.github/workflows/sc-standalone-deb10.yml b/.github/workflows/sc-standalone-deb10.yml index 440ec73d..d88caef9 100644 --- a/.github/workflows/sc-standalone-deb10.yml 
+++ b/.github/workflows/sc-standalone-deb10.yml @@ -42,7 +42,9 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<<EOF' >> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | @@ -72,8 +74,9 @@ jobs: - name: Run sairedis tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_sairec" - name: Run unit tests - run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k \ - "test_acl_ut or test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py + - name: Run unit tests + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_switch_ut and not sai_map_list_t" - name: Run data-driven tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v test_l2_basic_dd.py - name: Run API tests @@ -94,6 +97,14 @@ jobs: - name: Run thift data-driven tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests - run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py + - name: Run thrift unit tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ + "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" - name: Run thrift sairedis tests run: ./exec.sh --no-tty -s thrift pytest 
--testbed=saivs_thrift_standalone -v -k "test_sairec" + - name: Run thrift API tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "api/test" + + - name: Run PTF tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ../usecases/sai-ptf/SAI/ptf/saifdb.py -k FdbAttributeTest diff --git a/.github/workflows/sc-standalone-deb11.yml b/.github/workflows/sc-standalone-deb11.yml index 173e05ae..094e5360 100644 --- a/.github/workflows/sc-standalone-deb11.yml +++ b/.github/workflows/sc-standalone-deb11.yml @@ -42,7 +42,9 @@ jobs: - name: Check what files were updated id: check_changes run: | - echo 'changed_files=$(git diff --name-only origin/HEAD | xargs)' >> $GITHUB_OUTPUT + echo 'changed_files<<EOF' >> $GITHUB_OUTPUT + echo "$(git diff --name-only HEAD~1)" >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT - name: Check what Docker images have to be rebuild run: | @@ -72,8 +74,9 @@ jobs: - name: Run sairedis tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_sairec" - name: Run unit tests - run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k \ - "test_acl_ut or test_bridge_ut or (test_switch_ut and not sai_map_list_t) or test_vrf_ut or test_port_ut.py" + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v ut/test_acl_ut.py ut/test_bridge_ut.py ut/test_vrf_ut.py ut/test_port_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py + - name: Run unit tests + run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v -k "test_switch_ut and not sai_map_list_t" - name: Run data-driven tests run: ./exec.sh --no-tty pytest --testbed=saivs_standalone -v test_l2_basic_dd.py - name: Run API tests @@ -94,6 +97,14 @@ jobs: - name: Run thift data-driven tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v test_l2_basic_dd.py - name: Run thrift unit tests - run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py 
ut/test_bridge_ut.py ut/test_acl_ut.py + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ut/test_vrf_ut.py ut/test_bridge_ut.py ut/test_acl_ut.py ut/test_fdb_ut.py ut/test_lag_ut.py + - name: Run thrift unit tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k \ + "(test_switch_ut and not sai_map_list_t and not sai_system_port_config_list_t) or (test_port_ut and not sai_map_list_t)" - name: Run thrift sairedis tests run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "test_sairec" + - name: Run thrift API tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v -k "api/test" + + - name: Run PTF tests + run: ./exec.sh --no-tty -s thrift pytest --testbed=saivs_thrift_standalone -v ../usecases/sai-ptf/SAI/ptf/saifdb.py -k FdbAttributeTest diff --git a/build.sh b/build.sh index 4ac00093..ddfddbf8 100755 --- a/build.sh +++ b/build.sh @@ -149,13 +149,18 @@ if [ "${IMAGE_TYPE}" = "standalone" ]; then elif [ "${IMAGE_TYPE}" = "server" ]; then find ${ASIC_PATH}/../ -type f -name \*.py -exec install -D {} .build/{} \; find ${ASIC_PATH}/../ -type f -name \*.json -exec install -D {} .build/{} \; - docker build -f dockerfiles/${BASE_OS}/Dockerfile.server -t sc-server-base:${BASE_OS} . + if [ "${SAI_INTERFACE}" = "thrift" ]; then + docker build -f dockerfiles/${BASE_OS}/Dockerfile.saithrift-server -t sc-thrift-server-base:${BASE_OS} . + else + docker build -f dockerfiles/${BASE_OS}/Dockerfile.server -t sc-server-base:${BASE_OS} . + fi rm -rf .build/ else docker build -f dockerfiles/${BASE_OS}/Dockerfile.client --build-arg NOSNAPPI=${NOSNAPPI} -t sc-client:${BASE_OS} . if [ "${SAI_INTERFACE}" = "thrift" ]; then docker build -f dockerfiles/${BASE_OS}/Dockerfile.saithrift-client -t sc-thrift-client:${BASE_OS} . 
fi + exit 0 fi # Build target Docker image @@ -168,6 +173,10 @@ if [ "${IMAGE_TYPE}" = "standalone" ]; then docker build -f Dockerfile --build-arg BASE_OS=${BASE_OS} -t sc-${IMG_NAME}:${BASE_OS} . fi elif [ "${IMAGE_TYPE}" = "server" ]; then - docker build -f Dockerfile.server --build-arg BASE_OS=${BASE_OS} -t sc-server-${IMG_NAME}:${BASE_OS} . + if [ "${SAI_INTERFACE}" = "thrift" ]; then + docker build -f Dockerfile.saithrift-server --build-arg BASE_OS=${BASE_OS} -t sc-thrift-server-${IMG_NAME}:${BASE_OS} . + else + docker build -f Dockerfile.server --build-arg BASE_OS=${BASE_OS} -t sc-server-${IMG_NAME}:${BASE_OS} . + fi fi popd diff --git a/common/sai_client/sai_redis_client/sai_redis_client.py b/common/sai_client/sai_redis_client/sai_redis_client.py index ebfbb795..2782302e 100644 --- a/common/sai_client/sai_redis_client/sai_redis_client.py +++ b/common/sai_client/sai_redis_client/sai_redis_client.py @@ -86,16 +86,10 @@ def operate(self, obj, attrs, op): if self.asic_channel is None: self.__assert_syncd_running() + # Clean-up Redis RPC I/O pipe self.r.delete("GETRESPONSE_KEY_VALUE_OP_QUEUE") - - tout = 0.01 - attempts = self.attempts - while len(self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1)) > 0 and attempts > 0: - time.sleep(0.01) - attempts -= 1 - - if attempts == 0: - return [] + status = self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1) + assert len(status) == 0, "Redis RPC I/O failure!" # Remove spaces from the key string. # Required by sai_deserialize_route_entry() in sonic-sairedis. 
@@ -107,22 +101,25 @@ def operate(self, obj, attrs, op): self.r.lpush("ASIC_STATE_KEY_VALUE_OP_QUEUE", obj, attrs, op) self.r.publish(self.asic_channel, "G") - status = [] - attempts = self.attempts - - # Wait upto 3 mins for switch init if obj.startswith("SAI_OBJECT_TYPE_SWITCH") and op == "Screate": + # Wait upto 3 mins for switch init tout = 0.5 attempts = 240 + else: + tout = 0.01 + attempts = self.attempts + # Get response + status = self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1) while len(status) < 3 and attempts > 0: + assert self.__check_syncd_running(), "FATAL - SyncD has exited or crashed!" time.sleep(tout) attempts -= 1 status = self.r.lrange("GETRESPONSE_KEY_VALUE_OP_QUEUE", 0, -1) self.r.delete("GETRESPONSE_KEY_VALUE_OP_QUEUE") - assert len(status) == 3, "SAI \"{}\" operation failure!".format(op) + assert len(status) == 3, f"SAI \"{op[1:]}\" operation failure!" return status def create(self, obj, attrs, do_assert=True): @@ -543,18 +540,22 @@ def vid_to_rid(self, vid): assert rid.startswith("oid:"), f"Invalid RID format {vid}" return rid + def __check_syncd_running(self): + if self.asic_db == 1: + numsub = self.r.execute_command('PUBSUB', 'NUMSUB', 'ASIC_STATE_CHANNEL') + if numsub[1] >= 1: + # SONiC 202111 or older detected + return "ASIC_STATE_CHANNEL" + numsub = self.r.execute_command('PUBSUB', 'NUMSUB', f'ASIC_STATE_CHANNEL@{self.asic_db}') + if numsub[1] >= 1: + # SONiC 202205 or newer detected + return f"ASIC_STATE_CHANNEL@{self.asic_db}" + return None + def __assert_syncd_running(self, tout=30): for i in range(tout + 1): - if self.asic_db == 1: - numsub = self.r.execute_command('PUBSUB', 'NUMSUB', 'ASIC_STATE_CHANNEL') - if numsub[1] >= 1: - # SONiC 202111 or older detected - self.asic_channel = "ASIC_STATE_CHANNEL" - return - numsub = self.r.execute_command('PUBSUB', 'NUMSUB', f'ASIC_STATE_CHANNEL@{self.asic_db}') - if numsub[1] >= 1: - # SONiC 202205 or newer detected - self.asic_channel = f"ASIC_STATE_CHANNEL@{self.asic_db}" + 
self.asic_channel = self.__check_syncd_running() + if self.asic_channel: return if i < tout: time.sleep(1) diff --git a/common/sai_client/sai_thrift_client/sai_thrift_utils.py b/common/sai_client/sai_thrift_client/sai_thrift_utils.py index 5351ea23..6f2b1093 100644 --- a/common/sai_client/sai_thrift_client/sai_thrift_utils.py +++ b/common/sai_client/sai_thrift_client/sai_thrift_utils.py @@ -57,6 +57,24 @@ def convert_attribute_name_to_thrift(attr): """ return re.search('SAI_.*_ATTR_(.*)', attr).group(1).lower() + @staticmethod + def convert_u8_to_thrift(u8_str): + # Thrift does not support unsigned int notation. + # The values gt than 0x7F should be converted into the signed values. + value = int(u8_str, 0) + if value > 0x7F: + return -((~value & 0xFF) + 1) + return value + + @staticmethod + def convert_u16_to_thrift(u16_str): + # Thrift does not support unsigned int notation. + # The values gt than 0x7FFF should be converted into the signed values. + value = int(u16_str, 0) + if value > 0x7FFF: + return -((~value & 0xFFFF) + 1) + return value + @staticmethod def convert_value_to_thrift(value, attr_name=None, value_type=None): """ @@ -73,7 +91,13 @@ def convert_value_to_thrift(value, attr_name=None, value_type=None): actual_value = getattr(sai_headers, value, None) if actual_value != None: return actual_value - return 0 if value == '' else int(value, 0) + if value == '': + return 0 + if value_type == 'u8': + return ThriftConverter.convert_u8_to_thrift(value) + elif value_type == 'u16': + return ThriftConverter.convert_u16_to_thrift(value) + return int(value, 0) if value_type in [ 'booldata' ]: return value.lower() == "true" or value == "0" elif value_type in [ 'mac', 'ipv4', 'ipv6', 'chardata' ]: @@ -163,7 +187,7 @@ def sai_int_range(value_type, range): """ splitted = range.split(',') sai_thrift_class = getattr(ttypes, 'sai_thrift_{}_range_t'.format(value_type[:-5])) - return sai_thrift_class(min=splitted[0], max=splitted[1]) + return 
sai_thrift_class(min=int(splitted[0]), max=int(splitted[1])) @staticmethod def sai_qos_map_params(value): @@ -338,7 +362,7 @@ def object_id(oid): "16" => 16 "oid:0x10" => 16 """ - if oid == None or oid == 'null': + if oid == None or oid == 'null' or oid == 'SAI_NULL_OBJECT_ID': return 0 if isinstance(oid, str) and oid.startswith('oid:0x'): return int(oid[4:], 16) @@ -373,11 +397,15 @@ def convert_value_from_thrift(value, attr_name, obj_type=None): sai_thrift_ip_address_t('192.168.0.1'...), "ipaddr" => "192.168.0.1" """ value_type = ThriftConverter.get_attribute_type(attr_name) - if value_type in [ 's8', 'u8', 's16', 'u16', + if value_type in [ 's8', 's16', 'u32', 's64', 'u64', 'ptr', 'mac', 'ipv4', 'ipv6', 'chardata' ]: return str(value) + elif value_type == 'u8': + return str(value) if value > 0 else str(value & 0xFF) + elif value_type == 'u16': + return str(value) if value > 0 else str(value & 0xFFFF) elif value_type in [ 's32' ]: actual_value = ThriftConverter.get_str_by_enum(obj_type, attr_name, value) if actual_value != None: diff --git a/common/sai_dut.py b/common/sai_dut.py index d3f27224..95820a60 100644 --- a/common/sai_dut.py +++ b/common/sai_dut.py @@ -137,7 +137,7 @@ def init(self): # Write to CONFIG_DB SONiC device information needed on syncd start config_db = redis.Redis(host=self.server_ip, port=self.port, db=4) - config_db.hmset("DEVICE_METADATA|localhost", device_metadata) + config_db.hset("DEVICE_METADATA|localhost", mapping=device_metadata) config_db.set("CONFIG_DB_INITIALIZED", "1") def deinit(self): diff --git a/common/sai_npu.py b/common/sai_npu.py index 5a380aeb..65805dd8 100644 --- a/common/sai_npu.py +++ b/common/sai_npu.py @@ -100,8 +100,9 @@ def reset(self): attr = [] self.init(attr) - def create_fdb(self, vlan_oid, mac, bp_oid, action="SAI_PACKET_ACTION_FORWARD"): - self.create('SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( + def create_fdb(self, vlan_oid, mac, bp_oid, entry_type="SAI_FDB_ENTRY_TYPE_STATIC", 
action="SAI_PACKET_ACTION_FORWARD", do_assert=True): + return self.create( + 'SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( { "bvid" : vlan_oid, "mac" : mac, @@ -109,13 +110,14 @@ def create_fdb(self, vlan_oid, mac, bp_oid, action="SAI_PACKET_ACTION_FORWARD"): } ), [ - "SAI_FDB_ENTRY_ATTR_TYPE", "SAI_FDB_ENTRY_TYPE_STATIC", + "SAI_FDB_ENTRY_ATTR_TYPE", entry_type, "SAI_FDB_ENTRY_ATTR_BRIDGE_PORT_ID", bp_oid, "SAI_FDB_ENTRY_ATTR_PACKET_ACTION", action - ]) + ], + do_assert) def remove_fdb(self, vlan_oid, mac, do_assert=True): - self.remove('SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( + return self.remove('SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( { "bvid" : vlan_oid, "mac" : mac, @@ -207,11 +209,13 @@ def set_sku_mode(self, sku): # Remove existing ports num_ports = len(self.dot1q_bp_oids) for idx in range(num_ports): - self.remove_vlan_member(self.default_vlan_oid, self.dot1q_bp_oids[idx]) - self.remove(self.dot1q_bp_oids[idx]) - oid = self.get(self.port_oids[idx], ["SAI_PORT_ATTR_PORT_SERDES_ID"]).oid() - if oid != "oid:0x0": + oid = self.get_vlan_member(self.default_vlan_oid, self.dot1q_bp_oids[idx]) + if oid: self.remove(oid) + self.remove(self.dot1q_bp_oids[idx]) + status, data = self.get(self.port_oids[idx], ["SAI_PORT_ATTR_PORT_SERDES_ID"], do_assert=False) + if status == "SAI_STATUS_SUCCESS" and data.oid() != "oid:0x0": + self.remove(data.oid()) self.remove(self.port_oids[idx]) self.port_oids.clear() self.dot1q_bp_oids.clear() @@ -276,8 +280,7 @@ def set_sku_mode(self, sku): def assert_port_oper_up(self, port_oid, tout=15): for i in range(tout): - status, data = self.get(port_oid, ["SAI_PORT_ATTR_OPER_STATUS"]) - assert status == "SAI_STATUS_SUCCESS" + data = self.get(port_oid, ["SAI_PORT_ATTR_OPER_STATUS"]) if data.value() == "SAI_PORT_OPER_STATUS_UP": return if i + 1 < tout: diff --git a/configs/server/supervisord.conf.thrift b/configs/server/supervisord.conf.thrift new file mode 100644 index 00000000..3cd2a363 --- /dev/null +++ 
b/configs/server/supervisord.conf.thrift @@ -0,0 +1,32 @@ +[supervisord] +logfile_maxbytes=1MB +logfile_backups=2 +nodaemon=true + +[eventlistener:dependent-startup] +command=python3 -m supervisord_dependent_startup +autostart=true +autorestart=unexpected +startretries=0 +exitcodes=0,3 +events=PROCESS_STATE +buffer_size=1024 + +[program:rsyslogd] +command=/usr/sbin/rsyslogd -n -iNONE +priority=1 +autostart=false +autorestart=false +stdout_logfile=syslog +stderr_logfile=syslog +dependent_startup=true + +[program:saiserver] +command=/usr/sbin/saiserver -f /usr/share/sonic/hwsku/port_config.ini -p /usr/share/sonic/hwsku/sai.profile +priority=3 +autostart=true +autorestart=true +stdout_logfile=syslog +stderr_logfile=syslog +dependent_startup_wait_for=rsyslogd:running + diff --git a/dockerfiles/bullseye/Dockerfile.saithrift-server b/dockerfiles/bullseye/Dockerfile.saithrift-server new file mode 100644 index 00000000..83bfe034 --- /dev/null +++ b/dockerfiles/bullseye/Dockerfile.saithrift-server @@ -0,0 +1,79 @@ +FROM debian:buster-slim + +## Make apt-get non-interactive +ENV DEBIAN_FRONTEND=noninteractive + +COPY sai.env / + +# Install generic packages +RUN apt-get -o Acquire::Check-Valid-Until=false update && apt-get install -y \ + apt-utils \ + vim \ + curl \ + wget \ + iproute2 \ + unzip \ + git \ + procps \ + build-essential \ + graphviz \ + doxygen \ + aspell \ + python3-pip \ + rsyslog \ + supervisor + +# Add support for supervisord to handle startup dependencies +RUN pip3 install supervisord-dependent-startup==1.4.0 + +# Install generic packages +RUN apt-get install -y \ + libtemplate-perl \ + libconst-fast-perl \ + libmoosex-aliases-perl \ + libnamespace-autoclean-perl \ + libgetopt-long-descriptive-perl \ + aspell-en bison flex g++ \ + libboost-all-dev libevent-dev libssl-dev \ + libpython3-dev libpcap-dev + +WORKDIR /sai + +RUN apt-get install -y pkg-config \ + && wget "http://archive.apache.org/dist/thrift/0.11.0/thrift-0.11.0.tar.gz" \ + && tar -xf 
thrift-0.11.0.tar.gz \ + && cd thrift-0.11.0 \ + && ./bootstrap.sh \ + && ./configure --prefix=/usr --with-cpp --with-python \ + --with-qt4=no --with-qt5=no --with-csharp=no --with-java=no --with-erlang=no \ + --with-nodejs=no --with-lua=no --with-per=no --with-php=no --with-dart=no \ + --with-ruby=no --with-haskell=no --with-go=no --with-rs=no --with-haxe=no \ + --with-dotnetcore=no --with-d=no \ + && make && make install \ + && pip3 install ctypesgen lib/py \ + && cd /sai \ + && rm -rf thrift-0.11.0 thrift-0.11.0.tar.gz ; + +ENV SAITHRIFTV2=y +ENV GEN_SAIRPC_OPTS="-cve" +ENV SAIRPC_EXTRA_LIBS="-L/usr/local/lib/ -lpthread" + +RUN git clone https://github.com/opencomputeproject/SAI.git \ + && cd SAI && git fetch origin \ + && . /sai.env \ + && git checkout ${SAI_ID} \ + && cd meta \ + && make all libsaimetadata.so libsai.so \ + && cp libsaimetadata.so /usr/lib \ + && cp libsai.so /usr/lib \ + && cd .. \ + && mkdir /usr/include/sai/ \ + && cp ./inc/sai*.h /usr/include/sai/ \ + && cp ./experimental/sai*.h /usr/include/sai/ \ + && make saithrift-install + +WORKDIR /sai-challenger + +COPY configs/server/supervisord.conf.thrift /etc/supervisor/conf.d/supervisord.conf + +CMD ["/usr/bin/supervisord"] diff --git a/dockerfiles/bullseye/Dockerfile.server b/dockerfiles/bullseye/Dockerfile.server index 7e8d732d..60a4ef0a 100644 --- a/dockerfiles/bullseye/Dockerfile.server +++ b/dockerfiles/bullseye/Dockerfile.server @@ -34,7 +34,8 @@ RUN apt-get install -y \ make libtool m4 autoconf dh-exec debhelper automake cmake pkg-config \ libhiredis-dev libnl-3-dev libnl-genl-3-dev libnl-route-3-dev swig \ libgtest-dev libgmock-dev libboost-dev autoconf-archive \ - uuid-dev libboost-serialization-dev libyang-dev libyang1 + uuid-dev libboost-serialization-dev libyang-dev libyang1 \ + nlohmann-json3-dev RUN apt-get install -y \ libnl-3-dev libnl-genl-3-dev libnl-route-3-dev libnl-nf-3-dev libzmq3-dev diff --git a/dockerfiles/buster/Dockerfile.saithrift-server 
b/dockerfiles/buster/Dockerfile.saithrift-server new file mode 100644 index 00000000..51c94506 --- /dev/null +++ b/dockerfiles/buster/Dockerfile.saithrift-server @@ -0,0 +1,73 @@ +FROM debian:buster-slim + +RUN echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian/ buster main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian/ buster main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free" >> /etc/apt/sources.list && \ + echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian buster-backports main" >> /etc/apt/sources.list + +## Make apt-get non-interactive +ENV DEBIAN_FRONTEND=noninteractive + +COPY sai.env / + +# Install generic packages +RUN apt-get -o Acquire::Check-Valid-Until=false update && apt-get install -y \ + apt-utils \ + vim \ + curl \ + wget \ + iproute2 \ + unzip \ + git \ + procps \ + build-essential \ + graphviz \ + doxygen \ + aspell \ + python3-pip \ + rsyslog \ + supervisor + +# Add support for supervisord to handle startup dependencies +RUN pip3 install supervisord-dependent-startup==1.4.0 + +# Install generic packages +RUN apt-get install -y \ + libtemplate-perl \ + libconst-fast-perl \ + libmoosex-aliases-perl \ + libnamespace-autoclean-perl \ + libgetopt-long-descriptive-perl \ + aspell-en bison flex g++ \ + libboost-all-dev libevent-dev libssl-dev \ + libpython3-dev libpcap-dev + +WORKDIR /sai + +RUN apt-get install -y thrift-compiler libthrift-dev libthrift-0.11.0 \ + && pip3 install ctypesgen thrift==0.11.0 ; + +ENV SAITHRIFTV2=y +ENV GEN_SAIRPC_OPTS="-cve" +ENV SAIRPC_EXTRA_LIBS="-L/usr/local/lib/ -lpthread" + +RUN git clone 
https://github.com/opencomputeproject/SAI.git \ + && cd SAI && git fetch origin \ + && . /sai.env \ + && git checkout ${SAI_ID} \ + && cd meta \ + && make all libsaimetadata.so libsai.so \ + && cp libsaimetadata.so /usr/lib \ + && cp libsai.so /usr/lib \ + && cd .. \ + && mkdir /usr/include/sai/ \ + && cp ./inc/sai*.h /usr/include/sai/ \ + && cp ./experimental/sai*.h /usr/include/sai/ \ + && make saithrift-install + +WORKDIR /sai-challenger + +COPY configs/server/supervisord.conf.thrift /etc/supervisor/conf.d/supervisord.conf + +CMD ["/usr/bin/supervisord"] diff --git a/dockerfiles/buster/Dockerfile.server b/dockerfiles/buster/Dockerfile.server index d1e8c69b..371cff6d 100644 --- a/dockerfiles/buster/Dockerfile.server +++ b/dockerfiles/buster/Dockerfile.server @@ -40,7 +40,8 @@ RUN apt-get install -y \ make libtool m4 autoconf dh-exec debhelper automake cmake pkg-config \ libhiredis-dev libnl-3-dev libnl-genl-3-dev libnl-route-3-dev swig3.0 \ libgtest-dev libgmock-dev libboost-dev autoconf-archive \ - uuid-dev libboost-serialization-dev libyang-dev libyang0.16 + uuid-dev libboost-serialization-dev libyang-dev libyang0.16 \ + nlohmann-json3-dev RUN apt-get install -y \ libnl-3-dev libnl-genl-3-dev libnl-route-3-dev libnl-nf-3-dev libzmq3-dev diff --git a/npu/broadcom/BCM56850/saivs/Dockerfile.saithrift-server b/npu/broadcom/BCM56850/saivs/Dockerfile.saithrift-server new file mode 100644 index 00000000..cbbee09b --- /dev/null +++ b/npu/broadcom/BCM56850/saivs/Dockerfile.saithrift-server @@ -0,0 +1,10 @@ +ARG BASE_OS +FROM sc-thrift-server-base:$BASE_OS + +COPY configs/sai.profile /usr/share/sonic/hwsku/ +COPY configs/port_config.ini /usr/share/sonic/hwsku/ +COPY configs/lanemap.ini /usr/share/sonic/hwsku/ + +WORKDIR / + +CMD ["/usr/bin/supervisord"] \ No newline at end of file diff --git a/run.sh b/run.sh index 3d248752..64bb7101 100755 --- a/run.sh +++ b/run.sh @@ -186,7 +186,7 @@ if [ "${IMAGE_TYPE}" = "standalone" ]; then IMG_NAME=$(echo 
"${PREFIX}-${ASIC_TYPE}-${TARGET}" | tr '[:upper:]' '[:lower:]') OPTS="$OPTS -v $(pwd):/sai-challenger" elif [ "${IMAGE_TYPE}" = "server" ]; then - IMG_NAME=$(echo "sc-server-${ASIC_TYPE}-${TARGET}" | tr '[:upper:]' '[:lower:]') + IMG_NAME=$(echo "${PREFIX}-server-${ASIC_TYPE}-${TARGET}" | tr '[:upper:]' '[:lower:]') else IMG_NAME=${PREFIX}-client OPTS="$OPTS -v $(pwd):/sai-challenger" diff --git a/sai.env b/sai.env index fa38aa3b..ba81acd2 100644 --- a/sai.env +++ b/sai.env @@ -1,13 +1,13 @@ # The sonic-swss-common and sonic-sairedis commits were taken from -# sonic-buildimage master as of Aug 15, 2023 +# sonic-buildimage master as of Sep 29, 2023 # -# https://github.com/sonic-net/sonic-buildimage/tree/4acaaf8 +# https://github.com/sonic-net/sonic-buildimage/tree/81a2f56 -SWSS_COMMON_ID=be425ed -SAIREDIS_ID=eb24302 +SWSS_COMMON_ID=b0f148e +SAIREDIS_ID=c22b76b # SAI version: -# Branch v1.12 -# May 17, 2023 +# Branch master (post v1.13) +# Sep 8, 2023 -SAI_ID=92875f9 +SAI_ID=7f7a758 diff --git a/tests/api/test_dash_acl_group.py b/tests/api/test_dash_acl_group.py new file mode 100644 index 00000000..3551a051 --- /dev/null +++ b/tests/api/test_dash_acl_group.py @@ -0,0 +1,56 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiDashAclGroup: + # object with no attributes + + def test_dash_acl_group_create(self, dpu): + #Attribs are not marked mandatory but if we dont gives it throws an error + commands = [ + { + 'name': 'dash_acl_group_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_DASH_ACL_GROUP', + 'attributes': ["SAI_DASH_ACL_GROUP_ATTR_IP_ADDR_FAMILY","SAI_IP_ADDR_FAMILY_IPV4"] + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) 
+ + + + @pytest.mark.dependency(name="test_sai_dash_acl_group_attr_ip_addr_family_set") + def test_sai_dash_acl_group_attr_ip_addr_family_set(self, dpu): + + commands = [ + { + "name": "dash_acl_group_1", + "op": "set", + "attributes": ["SAI_DASH_ACL_GROUP_ATTR_IP_ADDR_FAMILY", 'SAI_IP_ADDR_FAMILY_IPV4'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + + def test_dash_acl_group_remove(self, dpu): + + commands = [{'name': 'dash_acl_group_1', 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_dash_acl_rule.py b/tests/api/test_dash_acl_rule.py new file mode 100644 index 00000000..2cfd3986 --- /dev/null +++ b/tests/api/test_dash_acl_rule.py @@ -0,0 +1,68 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiDashAclRule: + # object with parent SAI_OBJECT_TYPE_DASH_ACL_GROUP + + def test_dash_acl_rule_create(self, dpu): + commands = [ + { + 'name': 'dash_acl_group_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_DASH_ACL_GROUP', + 'attributes': ['SAI_DASH_ACL_GROUP_ATTR_IP_ADDR_FAMILY', 'SAI_IP_ADDR_FAMILY_IPV4',], + }, + { + 'name': 'dash_acl_rule_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_DASH_ACL_RULE', + 'attributes': [ + 'SAI_DASH_ACL_RULE_ATTR_DASH_ACL_GROUP_ID','$dash_acl_group_1', + 'SAI_DASH_ACL_RULE_ATTR_DIP','1.1.1.1', + 'SAI_DASH_ACL_RULE_ATTR_SIP','2.2.2.2', + 'SAI_DASH_ACL_RULE_ATTR_PROTOCOL','17', + 'SAI_DASH_ACL_RULE_ATTR_SRC_PORT','5678', + 'SAI_DASH_ACL_RULE_ATTR_DST_PORT','8765', + 'SAI_DASH_ACL_RULE_ATTR_PRIORITY','10', + ], + }, + ] + + results = [*dpu.process_commands(commands)] + 
print('======= SAI commands RETURN values create =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_dash_acl_rule_attr_action_set') + def test_sai_dash_acl_rule_attr_action_set(self, dpu): + commands = [ + { + 'name': 'dash_acl_rule_1', + 'op': 'set', + 'attributes': [ + 'SAI_DASH_ACL_RULE_ATTR_ACTION', + 'SAI_DASH_ACL_RULE_ACTION_PERMIT', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values set =======') + pprint(results) + + + def test_dash_acl_rule_remove(self, dpu): + commands = [ + {'name': 'dash_acl_rule_1', 'op': 'remove'}, + {'name': 'dash_acl_group_1', 'op': 'remove'}, + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) diff --git a/tests/api/test_direction_lookup_entry.py b/tests/api/test_direction_lookup_entry.py new file mode 100644 index 00000000..c94ace8d --- /dev/null +++ b/tests/api/test_direction_lookup_entry.py @@ -0,0 +1,54 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiDirectionLookupEntry: + # object with no attributes + + def test_direction_lookup_entry_create(self, dpu): + commands = [ + { + 'name': 'direction_lookup_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_DIRECTION_LOOKUP_ENTRY', + 'attributes': [], + 'key': {'switch_id': '$SWITCH_ID', 'vni': "2000"} + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_direction_lookup_entry_attr_action_set") + def test_sai_direction_lookup_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "direction_lookup_entry_1", + "op": "set", + "attributes": 
["SAI_DIRECTION_LOOKUP_ENTRY_ATTR_ACTION", 'SAI_DIRECTION_LOOKUP_ENTRY_ACTION_SET_OUTBOUND_DIRECTION'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_direction_lookup_entry_remove(self, dpu): + + commands = [{'name': 'direction_lookup_entry_1', 'key': {'switch_id': '$SWITCH_ID', 'vni': '2000'}, 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_eni.py b/tests/api/test_eni.py new file mode 100644 index 00000000..e420e991 --- /dev/null +++ b/tests/api/test_eni.py @@ -0,0 +1,489 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiEni: + # object with no attributes + + def test_eni_create(self, dpu): + commands = [ + {"name": "vnet","op": "create","type": "SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + { + "name": "eni_1", + "op": "create", + "type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_eni_attr_cps_set') + def test_sai_eni_attr_cps_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_CPS', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_eni_attr_pps_set') + def 
test_sai_eni_attr_pps_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_PPS', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_flows_set') + def test_sai_eni_attr_flows_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_FLOWS', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_admin_state_set') + def test_sai_eni_attr_admin_state_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ATTR_ADMIN_STATE', 'false'], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_vm_underlay_dip_set') + def test_sai_eni_attr_vm_underlay_dip_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ATTR_VM_UNDERLAY_DIP', '0.0.0.0'], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_vm_vni_set') + def test_sai_eni_attr_vm_vni_set(self, dpu): + commands = [ + {'name': 'eni_1', 'op': 'set', 'attributes': ['SAI_ENI_ATTR_VM_VNI', '0']} + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_attr_vnet_id_set') + def test_sai_eni_attr_vnet_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ATTR_VNET_ID', 'null'], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + 
@pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage4_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE4_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v4_stage5_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v4_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 
'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V4_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage4_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE4_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + 
pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_inbound_v6_stage5_dash_acl_group_id_set' + ) + def test_sai_eni_attr_inbound_v6_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_INBOUND_V6_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage4_dash_acl_group_id_set(self, dpu): + 
commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE4_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v4_stage5_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v4_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V4_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage1_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage1_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE1_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage2_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage2_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE2_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage3_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage3_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE3_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + 
print('======= SAI commands RETURN values get =======') + pprint(results) + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage4_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage4_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE4_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + @pytest.mark.dependency( + name='test_sai_eni_attr_outbound_v6_stage5_dash_acl_group_id_set' + ) + def test_sai_eni_attr_outbound_v6_stage5_dash_acl_group_id_set(self, dpu): + commands = [ + { + 'name': 'eni_1', + 'op': 'set', + 'attributes': [ + 'SAI_ENI_ATTR_OUTBOUND_V6_STAGE5_DASH_ACL_GROUP_ID', + 'null', + ], + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values get =======') + pprint(results) + + + def test_eni_remove(self, dpu): + + commands = [ + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) \ No newline at end of file diff --git a/tests/api/test_eni_ether_address_map_entry.py b/tests/api/test_eni_ether_address_map_entry.py new file mode 100644 index 00000000..359c7788 --- /dev/null +++ b/tests/api/test_eni_ether_address_map_entry.py @@ -0,0 +1,71 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiEniEtherAddressMapEntry: + # object with no attributes + + def test_eni_ether_address_map_entry_create(self, dpu): + commands = [ + {"name": "vnet","op": "create","type": 
"SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + { + "name": "eni_1", + "op": "create", + "type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + }, + { + 'name': 'eni_ether_address_map_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_ENI_ETHER_ADDRESS_MAP_ENTRY', + 'attributes': ["SAI_ENI_ETHER_ADDRESS_MAP_ENTRY_ATTR_ENI_ID","$eni_1"], + 'key': {'switch_id': '$SWITCH_ID', 'address': '00:AA:AA:AA:AB:00'}, + } + ] + + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values create =======\n') + pprint(results) + + @pytest.mark.dependency(name='test_sai_eni_ether_address_map_entry_attr_eni_id_set') + def test_sai_eni_ether_address_map_entry_attr_eni_id_set(self, dpu): + commands = [ + { + 'name': 'eni_ether_address_map_entry_1', + 'op': 'set', + 'attributes': ['SAI_ENI_ETHER_ADDRESS_MAP_ENTRY_ATTR_ENI_ID', 'null'], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + def test_eni_ether_address_map_entry_remove(self, dpu): + commands = [ + + { + 'name': 'eni_ether_address_map_entry_1', + 'key': {'switch_id': '$SWITCH_ID', 'address': '00:AA:AA:AA:AB:00'}, + 'op': 'remove', + }, + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + ] + + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values remove =======\n') + pprint(results) + diff --git a/tests/api/test_inbound_routing_entry.py b/tests/api/test_inbound_routing_entry.py new file mode 100644 index 00000000..54b0ea54 --- /dev/null +++ b/tests/api/test_inbound_routing_entry.py @@ -0,0 +1,97 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and 
len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiInboundRoutingEntry: + # object with no attributes + + def test_inbound_routing_entry_create(self, dpu): + + commands = [ + {"name": "vnet","op": "create","type": "SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + {"name": "eni_1","op": "create","type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + }, + {'name': 'inbound_routing_entry_1', 'op': 'create', 'type': 'SAI_OBJECT_TYPE_INBOUND_ROUTING_ENTRY', + 'attributes': [ + "SAI_INBOUND_ROUTING_ENTRY_ATTR_ACTION","SAI_INBOUND_ROUTING_ENTRY_ACTION_VXLAN_DECAP_PA_VALIDATE", + "SAI_INBOUND_ROUTING_ENTRY_ATTR_SRC_VNET_ID","$vnet" + ], + 'key': {'switch_id': '$SWITCH_ID', 'eni_id': "33", 'vni': '2000', 'sip': '1.1.1.1', 'sip_mask': '32', 'priority': '0'} + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_inbound_routing_entry_attr_action_set") + def test_sai_inbound_routing_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "inbound_routing_entry_1", + "op": "set", + "attributes": [ + "SAI_INBOUND_ROUTING_ENTRY_ATTR_ACTION", + 'SAI_INBOUND_ROUTING_ENTRY_ACTION_VXLAN_DECAP' + ], + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + @pytest.mark.dependency(name="test_sai_inbound_routing_entry_attr_src_vnet_id_set") + def test_sai_inbound_routing_entry_attr_src_vnet_id_set(self, dpu): + + commands = [ + { + "name": "inbound_routing_entry_1", + "op": "set", + "attributes": ["SAI_INBOUND_ROUTING_ENTRY_ATTR_SRC_VNET_ID", '0'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN 
values get =======") + pprint(results) + + + def test_inbound_routing_entry_remove(self, dpu): + + commands = [ + { + 'name': 'inbound_routing_entry_1', + 'op': 'remove', + 'key': + { + 'switch_id': '$SWITCH_ID', + 'eni_id': '33', + 'vni': '2000', + 'sip': '1.1.1.1', + 'sip_mask': '32', + 'priority': '0' + }, + }, + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_outbound_ca_to_pa_entry.py b/tests/api/test_outbound_ca_to_pa_entry.py new file mode 100644 index 00000000..100b036e --- /dev/null +++ b/tests/api/test_outbound_ca_to_pa_entry.py @@ -0,0 +1,124 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiOutboundCaToPaEntry: + # object with no attributes + + def test_outbound_ca_to_pa_entry_create(self, dpu): + + commands = [ + { + "name": "vnet", + "op": "create", + "type": "SAI_OBJECT_TYPE_VNET", + "attributes": ["SAI_VNET_ATTR_VNI","2000"] + }, + { + 'name': 'outbound_ca_to_pa_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_OUTBOUND_CA_TO_PA_ENTRY', + 'attributes': [ + "SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_UNDERLAY_DIP","221.0.2.100", + "SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_OVERLAY_DMAC","00:1B:6E:00:00:01", + "SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_USE_DST_VNET_VNI","True", + ], + 'key': {'switch_id': '$SWITCH_ID', 'dst_vnet_id': '$vnet', 'dip': '1.128.0.1'} + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_underlay_dip_set") + def 
test_sai_outbound_ca_to_pa_entry_attr_underlay_dip_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_UNDERLAY_DIP", '0.0.0.0'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_overlay_dmac_set") + def test_sai_outbound_ca_to_pa_entry_attr_overlay_dmac_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_OVERLAY_DMAC", '0:0:0:0:0:0'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_use_dst_vnet_vni_set") + def test_sai_outbound_ca_to_pa_entry_attr_use_dst_vnet_vni_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_USE_DST_VNET_VNI", 'false'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + @pytest.mark.dependency(name="test_sai_outbound_ca_to_pa_entry_attr_counter_id_set") + def test_sai_outbound_ca_to_pa_entry_attr_counter_id_set(self, dpu): + + commands = [ + { + "name": "outbound_ca_to_pa_entry_1", + "op": "set", + "attributes": ["SAI_OUTBOUND_CA_TO_PA_ENTRY_ATTR_COUNTER_ID", '0'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_outbound_ca_to_pa_entry_remove(self, dpu): + + commands = [ + { + 'name': 'outbound_ca_to_pa_entry_1', + 'op': 'remove', + 'key': + { + 'switch_id': '$SWITCH_ID', + 'dst_vnet_id': '$vnet', + 'dip': '1.128.0.1' + }, + }, + {"name": "vnet","op": "remove"}, + + ] + results = 
[*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_outbound_routing_entry.py b/tests/api/test_outbound_routing_entry.py new file mode 100644 index 00000000..be7b5af0 --- /dev/null +++ b/tests/api/test_outbound_routing_entry.py @@ -0,0 +1,132 @@ +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + + +@pytest.mark.dpu +class TestSaiOutboundRoutingEntry: + # object with no attributes + + def test_outbound_routing_entry_create(self, dpu): + commands = [ + {"name": "vnet","op": "create","type": "SAI_OBJECT_TYPE_VNET","attributes": ["SAI_VNET_ATTR_VNI","2000"]}, + { + "name": "eni_1", + "op": "create", + "type": "SAI_OBJECT_TYPE_ENI", + "attributes": [ + "SAI_ENI_ATTR_ADMIN_STATE","True", + "SAI_ENI_ATTR_VM_UNDERLAY_DIP","10.10.1.10", + "SAI_ENI_ATTR_VM_VNI","2000", + "SAI_ENI_ATTR_VNET_ID","$vnet", + ] + }, + { + 'name': 'outbound_routing_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_OUTBOUND_ROUTING_ENTRY', + 'attributes': ["SAI_OUTBOUND_ROUTING_ENTRY_ATTR_ACTION", "SAI_OUTBOUND_ROUTING_ENTRY_ACTION_ROUTE_VNET","SAI_OUTBOUND_ROUTING_ENTRY_ATTR_DST_VNET_ID", "$vnet"], + 'key': { + 'switch_id': '$SWITCH_ID', + 'eni_id': '$eni_1', + 'destination': '10.1.0.0/16', + }, + } + ] + + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values create =======\n') + pprint(results) + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_action_set') + def test_sai_outbound_routing_entry_attr_action_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': [ + 'SAI_OUTBOUND_ROUTING_ENTRY_ATTR_ACTION', + 'SAI_OUTBOUND_ROUTING_ENTRY_ACTION_ROUTE_VNET', + 
"SAI_OUTBOUND_ROUTING_ENTRY_ATTR_DST_VNET_ID", "$vnet" + ], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_dst_vnet_id_set') + def test_sai_outbound_routing_entry_attr_dst_vnet_id_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': [ + 'SAI_OUTBOUND_ROUTING_ENTRY_ATTR_ACTION', + 'SAI_OUTBOUND_ROUTING_ENTRY_ACTION_ROUTE_VNET', + "SAI_OUTBOUND_ROUTING_ENTRY_ATTR_DST_VNET_ID", "$vnet" + ], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_overlay_ip_set') + def test_sai_outbound_routing_entry_attr_overlay_ip_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': ['SAI_OUTBOUND_ROUTING_ENTRY_ATTR_OVERLAY_IP', '0.0.0.0'], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + @pytest.mark.dependency(name='test_sai_outbound_routing_entry_attr_counter_id_set') + def test_sai_outbound_routing_entry_attr_counter_id_set(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'op': 'set', + 'attributes': ['SAI_OUTBOUND_ROUTING_ENTRY_ATTR_COUNTER_ID', 'null'], + } + ] + results = [*dpu.process_commands(commands)] + print('\n======= SAI commands RETURN values set =======\n') + pprint(results) + + + + def test_outbound_routing_entry_remove(self, dpu): + commands = [ + { + 'name': 'outbound_routing_entry_1', + 'key': { + 'switch_id': '$SWITCH_ID', + 'eni_id': '$eni_1', + 'destination': '10.1.0.0/16', + }, + 'op': 'remove', + }, + {'name': 'eni_1', 'op': 'remove'}, + {"name": "vnet","op": "remove"}, + + ] + + results = [*dpu.process_commands(commands)] + print('======= 
SAI commands RETURN values remove =======') + pprint(results) diff --git a/tests/api/test_pa_validation_entry.py b/tests/api/test_pa_validation_entry.py new file mode 100644 index 00000000..118b8f7e --- /dev/null +++ b/tests/api/test_pa_validation_entry.py @@ -0,0 +1,79 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiPaValidationEntry: + # object with no attributes + + def test_pa_validation_entry_create(self, dpu): + + commands = [ + { + "name": "vnet", + "op": "create", + "type": "SAI_OBJECT_TYPE_VNET", + "attributes": ["SAI_VNET_ATTR_VNI","7000"] + }, + { + 'name': 'pa_validation_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_PA_VALIDATION_ENTRY', + 'attributes': [], + 'key': + { + 'switch_id': '$SWITCH_ID', + 'vnet_id': '$vnet', + 'sip': '1.1.1.1' + } + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_pa_validation_entry_attr_action_set") + def test_sai_pa_validation_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "pa_validation_entry_1", + "op": "set", + "attributes": [ + "SAI_PA_VALIDATION_ENTRY_ATTR_ACTION", + 'SAI_PA_VALIDATION_ENTRY_ACTION_PERMIT' + ], + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + def test_pa_validation_entry_remove(self, dpu): + + commands = [ + { + 'name': 'pa_validation_entry_1', + 'op': 'remove', + 'key': + { + 'switch_id': '$SWITCH_ID', + 'vnet_id': '$vnet', + 'sip': '1.1.1.1' + }, + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git 
a/tests/api/test_vip_entry.py b/tests/api/test_vip_entry.py new file mode 100644 index 00000000..d59e619f --- /dev/null +++ b/tests/api/test_vip_entry.py @@ -0,0 +1,55 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + +@pytest.mark.dpu +class TestSaiVipEntry: + # object with no attributes + + def test_vip_entry_create(self, dpu): + + commands = [ + { + 'name': 'vip_entry_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_VIP_ENTRY', + 'attributes': [], + 'key': {'switch_id': '$SWITCH_ID', 'vip': '1.2.1.1'} + } + ] + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + + @pytest.mark.dependency(name="test_sai_vip_entry_attr_action_set") + def test_sai_vip_entry_attr_action_set(self, dpu): + + commands = [ + { + "name": "vip_entry_1", + "op": "set", + "attributes": ["SAI_VIP_ENTRY_ATTR_ACTION", 'SAI_VIP_ENTRY_ACTION_ACCEPT'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_vip_entry_remove(self, dpu): + + commands = [{'name': 'vip_entry_1', 'key': {'switch_id': '$SWITCH_ID', 'vip': '1.2.1.1'}, 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/api/test_vnet.py b/tests/api/test_vnet.py new file mode 100644 index 00000000..db35a3cf --- /dev/null +++ b/tests/api/test_vnet.py @@ -0,0 +1,54 @@ + +from pprint import pprint + +import pytest + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.dpu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + 
+@pytest.mark.dpu +class TestSaiVnet: + # object with no attributes + + def test_vnet_create(self, dpu): + + commands = [ + { + 'name': 'vnet_1', + 'op': 'create', + 'type': 'SAI_OBJECT_TYPE_VNET', + 'attributes': ["SAI_VNET_ATTR_VNI", '2001'] + } + ] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values create =======') + pprint(results) + + + @pytest.mark.dependency(name="test_sai_vnet_attr_vni_set") + def test_sai_vnet_attr_vni_set(self, dpu): + + commands = [ + { + "name": "vnet_1", + "op": "set", + "attributes": ["SAI_VNET_ATTR_VNI", '2001'] + } + ] + results = [*dpu.process_commands(commands)] + print("======= SAI commands RETURN values get =======") + pprint(results) + + + def test_vnet_remove(self, dpu): + + commands = [{'name': 'vnet_1', 'op': 'remove'}] + + results = [*dpu.process_commands(commands)] + print('======= SAI commands RETURN values remove =======') + pprint(results) + diff --git a/tests/conftest.py b/tests/conftest.py index 05ee7384..c7f5f483 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,13 +49,8 @@ def prev_test_failed(): def pytest_addoption(parser): - parser.addoption("--sai-server", action="store", default='localhost', help="SAI server IP") - parser.addoption("--traffic", action="store_true", default=False, help="run tests with traffic") - parser.addoption("--loglevel", action="store", default='NOTICE', help="syncd logging level") - parser.addoption("--asic", action="store", default=os.getenv('SC_ASIC'), help="ASIC type") - parser.addoption("--target", action="store", default=os.getenv('SC_TARGET'), help="The target device with this NPU") - parser.addoption("--sku", action="store", default=None, help="SKU mode") - parser.addoption("--testbed", action="store", default=None, help="Testbed name") + parser.addoption("--traffic", action="store_true", help="run tests with traffic") + parser.addoption("--testbed", action="store", help="Testbed name", required=True) def 
pytest_sessionstart(session): @@ -69,119 +64,52 @@ def exec_params(request): # Generic parameters "traffic": request.config.getoption("--traffic"), "testbed": request.config.getoption("--testbed"), - # DUT specific parameters - "alias": "dut", - "asic": request.config.getoption("--asic"), - "target": request.config.getoption("--target"), - "sku": request.config.getoption("--sku"), - "client": { - "type": "redis", - "config": { - "ip": request.config.getoption("--sai-server"), - "port": "6379", - "loglevel": request.config.getoption("--loglevel") - } - } } return config_param @pytest.fixture(scope="session") def testbed_instance(exec_params): - testbed_name = exec_params.get("testbed", None) - if testbed_name is None: - yield None - else: - testbed = SaiTestbed(f"{curdir}/..", testbed_name, exec_params["traffic"]) - testbed.init() - yield testbed - testbed.deinit() + testbed = SaiTestbed(f"{curdir}/..", exec_params["testbed"], exec_params["traffic"]) + testbed.init() + yield testbed + testbed.deinit() @pytest.fixture(scope="function") def testbed(testbed_instance): - if testbed_instance: - testbed_instance.setup() - yield testbed_instance - testbed_instance.teardown() - else: - yield None + testbed_instance.setup() + yield testbed_instance + testbed_instance.teardown() @pytest.fixture(scope="session") -def npu(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.npu) == 1: - return testbed_instance.npu[0] - return None - - npu = None - exec_params["asic_dir"] = None - - if exec_params["asic"] == "generic": - npu = SaiNpu(exec_params) - else: - npu = SaiTestbed.spawn_asic(f"{curdir}/..", exec_params, "npu") - - if npu is not None: - npu.reset() - return npu +def npu(testbed_instance): + if len(testbed_instance.npu) == 1: + return testbed_instance.npu[0] + return None @pytest.fixture(scope="session") -def dpu(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.dpu) == 1: - return 
testbed_instance.dpu[0] - return None +def dpu(testbed_instance): + if len(testbed_instance.dpu) == 1: + return testbed_instance.dpu[0] + return None - dpu = None - exec_params["asic_dir"] = None - - if exec_params["asic"] == "generic": - dpu = SaiDpu(exec_params) - else: - dpu = SaiTestbed.spawn_asic(f"{curdir}/..", exec_params, "dpu") - - if dpu is not None: - dpu.reset() - return dpu @pytest.fixture(scope="session") -def phy(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.phy) == 1: - return testbed_instance.phy[0] - return None +def phy(testbed_instance): + if len(testbed_instance.phy) == 1: + return testbed_instance.phy[0] + return None - phy = None - exec_params["asic_dir"] = None - - if exec_params["asic"] == "generic": - phy = SaiPhy(exec_params) - else: - phy = SaiTestbed.spawn_asic(f"{curdir}/..", exec_params, "phy") - - if phy is not None: - phy.reset() - return phy @pytest.fixture(scope="session") -def dataplane_instance(exec_params, testbed_instance): - if testbed_instance is not None: - if len(testbed_instance.dataplane) == 1: - yield testbed_instance.dataplane[0] - else: - yield None +def dataplane_instance(testbed_instance): + if len(testbed_instance.dataplane) == 1: + yield testbed_instance.dataplane[0] else: - cfg = { - "type": "ptf", - "traffic": exec_params["traffic"] - } - dp = SaiTestbed.spawn_dataplane(cfg) - dp.init() - yield dp - dp.deinit() + yield None @pytest.fixture(scope="function") diff --git a/tests/pytest.ini b/tests/pytest.ini new file mode 100644 index 00000000..5fc853f8 --- /dev/null +++ b/tests/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +markers = + dpu: mark DPU specific tests \ No newline at end of file diff --git a/tests/ut/test_fdb_ut.py b/tests/ut/test_fdb_ut.py new file mode 100644 index 00000000..af9a4305 --- /dev/null +++ b/tests/ut/test_fdb_ut.py @@ -0,0 +1,164 @@ +import pytest +import json +from saichallenger.common.sai_data import SaiObjType + + +@pytest.fixture(scope="module", 
autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.npu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + + +class TestFdbEntry: + state = dict() + mac = "00:00:11:22:33:44" + + @classmethod + def key(cls, npu, bvid, mac=None): + key = 'SAI_OBJECT_TYPE_FDB_ENTRY:' + json.dumps( + { + "bvid" : bvid, + "mac" : mac if mac else cls.mac, + "switch_id" : npu.switch_oid + } + ) + return key + + @pytest.mark.dependency() + def test_create_dynamic(self, npu): + npu.create_fdb(npu.default_vlan_oid, TestFdbEntry.mac, npu.dot1q_bp_oids[0], "SAI_FDB_ENTRY_TYPE_DYNAMIC") + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_create_duplicated_dynamic(self, npu): + status, _ = npu.create_fdb(npu.default_vlan_oid, TestFdbEntry.mac, npu.dot1q_bp_oids[0], "SAI_FDB_ENTRY_TYPE_DYNAMIC", do_assert=False) + assert status == "SAI_STATUS_ITEM_ALREADY_EXISTS" + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_create_duplicated_static(self, npu): + status, _ = npu.create_fdb(npu.default_vlan_oid, TestFdbEntry.mac, npu.dot1q_bp_oids[0], "SAI_FDB_ENTRY_TYPE_STATIC", do_assert=False) + assert status == "SAI_STATUS_ITEM_ALREADY_EXISTS" + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_change_to_static(self, npu): + npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_TYPE", "SAI_FDB_ENTRY_TYPE_STATIC"]) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_change_to_static']) + def test_change_to_dynamic(self, npu): + npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_TYPE", "SAI_FDB_ENTRY_TYPE_DYNAMIC"]) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_default_action(self, npu): + data = npu.get(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_PACKET_ACTION", ""]) + assert data.value() == 
"SAI_PACKET_ACTION_FORWARD" + self.state["SAI_FDB_ENTRY_ATTR_PACKET_ACTION"] = data.value() + + @pytest.mark.parametrize( + "action", + [ + ("SAI_PACKET_ACTION_DROP"), + ("SAI_PACKET_ACTION_DONOTDROP"), + ("SAI_PACKET_ACTION_COPY"), + ("SAI_PACKET_ACTION_COPY_CANCEL"), + ("SAI_PACKET_ACTION_TRAP"), + ("SAI_PACKET_ACTION_LOG"), + ("SAI_PACKET_ACTION_DENY"), + ("SAI_PACKET_ACTION_TRANSIT"), + ("SAI_PACKET_ACTION_FORWARD") + ] + ) + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_set_action(self, npu, action): + attr = "SAI_FDB_ENTRY_ATTR_PACKET_ACTION" + status = npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), + [attr, action], do_assert=False) + npu.assert_status_success(status) + data = npu.get(TestFdbEntry.key(npu, npu.default_vlan_oid), [attr, ""]) + assert data.value() == action + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_no_bridge_port(self, npu): + npu.set(TestFdbEntry.key(npu, npu.default_vlan_oid), ["SAI_FDB_ENTRY_ATTR_BRIDGE_PORT_ID", "oid:0x0"]) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_remove_dynamic(self, npu): + npu.remove_fdb(npu.default_vlan_oid, TestFdbEntry.mac) + + @pytest.mark.dependency(depends=['TestFdbEntry::test_create_dynamic']) + def test_duplicated_remove(self, npu): + status = npu.remove_fdb(npu.default_vlan_oid, TestFdbEntry.mac, do_assert=False) + assert status == "SAI_STATUS_ITEM_NOT_FOUND" + + +class TestFlushFdbEntries: + dynamic_entries = ["00:00:00:00:00:11", "00:00:00:00:00:22"] + static_entries = ["00:00:00:00:00:33", "00:00:00:00:00:44"] + + def flush_fdb_entries(self, npu, entries_type): + for mac in TestFlushFdbEntries.dynamic_entries: + npu.create_fdb(npu.default_vlan_oid, mac, npu.dot1q_bp_oids[0], f"SAI_FDB_ENTRY_TYPE_DYNAMIC") + + for mac in TestFlushFdbEntries.static_entries: + npu.create_fdb(npu.default_vlan_oid, mac, npu.dot1q_bp_oids[0], f"SAI_FDB_ENTRY_TYPE_STATIC") + + 
npu.flush_fdb_entries(npu.switch_oid, [ + "SAI_FDB_FLUSH_ATTR_BV_ID", npu.default_vlan_oid, + "SAI_FDB_FLUSH_ATTR_ENTRY_TYPE", f"SAI_FDB_FLUSH_ENTRY_TYPE_{entries_type}" + ]) + + def test_flush_dynamic(self, npu): + self.flush_fdb_entries(npu, "DYNAMIC") + + flushed = [] + not_flushed = [] + + for mac in TestFlushFdbEntries.dynamic_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed.append(mac) + for mac in TestFlushFdbEntries.static_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status != "SAI_STATUS_SUCCESS": + flushed.append(mac) + + msg = "" if len(not_flushed) == 0 else f"Dynamic FDB entries {not_flushed} have not been flushed. " + msg += "" if len(flushed) == 0 else f"Static FDB entries {flushed} have been flushed." + assert not msg, msg + + def test_flush_static(self, npu): + self.flush_fdb_entries(npu, "STATIC") + + flushed = [] + not_flushed = [] + + for mac in TestFlushFdbEntries.dynamic_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status != "SAI_STATUS_SUCCESS": + flushed.append(mac) + for mac in TestFlushFdbEntries.static_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed.append(mac) + + msg = "" if len(not_flushed) == 0 else f"Static FDB entries {not_flushed} have not been flushed. " + msg += "" if len(flushed) == 0 else f"Dynamic FDB entries {flushed} have been flushed." 
+ assert not msg, msg + + def test_flush_all(self, npu): + self.flush_fdb_entries(npu, "ALL") + + not_flushed_dynamic = [] + not_flushed_static = [] + + for mac in TestFlushFdbEntries.dynamic_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed_dynamic.append(mac) + for mac in TestFlushFdbEntries.static_entries: + status = npu.remove_fdb(npu.default_vlan_oid, mac, do_assert=False) + if status == "SAI_STATUS_SUCCESS": + not_flushed_static.append(mac) + + msg = "" if len(not_flushed_static) == 0 else f"Static FDB entries {not_flushed_static} have not been flushed. " + msg += "" if len(not_flushed_dynamic) == 0 else f"Dynamic FDB entries {not_flushed_dynamic} have not been flushed." + assert not msg, msg diff --git a/tests/ut/test_lag_ut.py b/tests/ut/test_lag_ut.py new file mode 100644 index 00000000..84cb4737 --- /dev/null +++ b/tests/ut/test_lag_ut.py @@ -0,0 +1,116 @@ +import pytest +from saichallenger.common.sai import Sai +from saichallenger.common.sai_data import SaiObjType + +lag_attrs = Sai.get_obj_attrs("SAI_OBJECT_TYPE_LAG") +lag_mbr_attrs = Sai.get_obj_attrs("SAI_OBJECT_TYPE_LAG_MEMBER") + +@pytest.fixture(scope="module", autouse=True) +def skip_all(testbed_instance): + testbed = testbed_instance + if testbed is not None and len(testbed.npu) != 1: + pytest.skip("invalid for \"{}\" testbed".format(testbed.name)) + + +class TestLag: + oid = None + lag_mbr_num = 2 + lag_mbr_oids = [] + + @pytest.mark.dependency() + def test_create(self, npu): + TestLag.oid = npu.create(SaiObjType.LAG) + + @pytest.mark.parametrize( + "attr,attr_type", + lag_attrs + ) + @pytest.mark.dependency(depends=['TestLag::test_create']) + def test_get_attr(self, npu, attr, attr_type): + if attr == "SAI_LAG_ATTR_SYSTEM_PORT_AGGREGATE_ID": + pytest.skip("Valid for SAI_SWITCH_TYPE_VOQ only") + status, data = npu.get_by_type(TestLag.oid, attr, attr_type, False) + npu.assert_status_success(status) + if attr == 
"SAI_LAG_ATTR_PORT_LIST": + assert len(data.to_list()) == 0 + elif attr == "SAI_LAG_ATTR_PORT_VLAN_ID": + assert data.value() == npu.default_vlan_id + elif attr in ["SAI_LAG_ATTR_DROP_UNTAGGED", "SAI_LAG_ATTR_DROP_TAGGED"]: + assert data.value() == "false" + + @pytest.mark.dependency(depends=['TestLag::test_create']) + def test_create_members(self, npu): + # Remove bridge ports + for idx in range(TestLag.lag_mbr_num): + npu.remove_vlan_member(npu.default_vlan_oid, npu.dot1q_bp_oids[idx]) + npu.remove(npu.dot1q_bp_oids[idx]) + + # Create LAG members + for idx in range(TestLag.lag_mbr_num): + oid = npu.create(SaiObjType.LAG_MEMBER, + [ + "SAI_LAG_MEMBER_ATTR_LAG_ID", TestLag.oid, + "SAI_LAG_MEMBER_ATTR_PORT_ID", npu.port_oids[idx] + ]) + TestLag.lag_mbr_oids.append(oid) + + @pytest.mark.parametrize( + "attr,attr_type", + lag_mbr_attrs + ) + @pytest.mark.dependency(depends=['TestLag::test_create_members']) + def test_get_member_attr(self, npu, attr, attr_type): + status, data = npu.get_by_type(TestLag.lag_mbr_oids[0], attr, attr_type, False) + npu.assert_status_success(status) + if attr == "SAI_LAG_MEMBER_ATTR_LAG_ID": + assert data.value() == TestLag.oid + elif attr == "SAI_LAG_MEMBER_ATTR_PORT_ID": + assert data.value() == npu.port_oids[0] + elif attr in ["SAI_LAG_MEMBER_ATTR_EGRESS_DISABLE", "SAI_LAG_MEMBER_ATTR_INGRESS_DISABLE"]: + assert data.value() == "false" + + + @pytest.mark.dependency(depends=['TestLag::test_create_members']) + def test_check_members(self, npu): + status, data = npu.get(TestLag.oid, ["SAI_LAG_ATTR_PORT_LIST"], False) + npu.assert_status_success(status) + mbr_oids = data.oids() + assert len(mbr_oids) == TestLag.lag_mbr_num + for oid in mbr_oids: + assert oid in TestLag.lag_mbr_oids + + @pytest.mark.dependency(depends=['TestLag::test_create_members']) + def test_remove_members(self, npu): + # Remove LAG members + for oid in TestLag.lag_mbr_oids: + npu.remove(oid) + + # Create bridge port for ports removed from LAG + for idx in 
range(TestLag.lag_mbr_num): + bp_oid = npu.create(SaiObjType.BRIDGE_PORT, + [ + "SAI_BRIDGE_PORT_ATTR_TYPE", "SAI_BRIDGE_PORT_TYPE_PORT", + "SAI_BRIDGE_PORT_ATTR_PORT_ID", npu.port_oids[idx], + #"SAI_BRIDGE_PORT_ATTR_BRIDGE_ID", npu.dot1q_br_oid, + "SAI_BRIDGE_PORT_ATTR_ADMIN_STATE", "true" + ]) + npu.dot1q_bp_oids[idx] = bp_oid + + # Add ports to default VLAN + for oid in npu.dot1q_bp_oids[0:TestLag.lag_mbr_num]: + npu.create_vlan_member(npu.default_vlan_oid, oid, "SAI_VLAN_TAGGING_MODE_UNTAGGED") + + # Set PVID + for oid in npu.port_oids[0:TestLag.lag_mbr_num]: + npu.set(oid, ["SAI_PORT_ATTR_PORT_VLAN_ID", npu.default_vlan_id]) + + @pytest.mark.dependency(depends=['TestLag::test_remove_members']) + def test_check_no_members(self, npu): + status, data = npu.get(TestLag.oid, ["SAI_LAG_ATTR_PORT_LIST"], False) + npu.assert_status_success(status) + assert len(data.oids()) == 0 + + @pytest.mark.dependency(depends=['TestLag::test_create']) + def test_remove(self, npu): + npu.remove(TestLag.oid) + diff --git a/usecases/sai-ptf/README.md b/usecases/sai-ptf/README.md index eebab1f9..a3693395 100644 --- a/usecases/sai-ptf/README.md +++ b/usecases/sai-ptf/README.md @@ -6,29 +6,22 @@ SAI Challenger has capability to run these tests by setting up proper test envir # Steps to run tests -0. Setup the environment +1. Setup the environment ``` git submodule update --init -cp usecases/sai-ptf/ptf-conftest.py usecases/sai-ptf/SAI/ptf/conftest.py -cp usecases/sai-ptf/patches/0001-sai-base-test.patch usecases/sai-ptf/SAI/ -cd usecases/sai-ptf/SAI/ && patch -p1 < 0001-sai-base-test.patch && cd - ``` -1. Build a Docker image with required test env +2. Build a Docker image with a required test environment. + This step is optional. The image can be implicitly pulled from DockerHub by `run.sh`. ``` ./build.sh -s thrift ``` -2. Start a container based on newly built image +3. Start a Docker container ``` ./run.sh -s thrift ``` -3. 
Login into the container -``` -docker exec -ti sc-thrift-trident2-saivs-run bash -``` - 4. Run a test @@ -39,11 +32,6 @@ To run PTF test case: pytest --testbed=saivs_thrift_standalone ../usecases/sai-ptf/SAI/ptf/saifdb.py -k FdbAttributeTest -v ``` -To clean-up `saiserver` for `saivs` target after test case execution: -``` -supervisorctl restart saiserver -``` - To run SAI Challenger test case using Thrift RPC: ``` pytest --testbed=saivs_thrift_standalone -k "access_to_access" -v diff --git a/usecases/sai-ptf/ptf-conftest.py b/usecases/sai-ptf/conftest.py similarity index 55% rename from usecases/sai-ptf/ptf-conftest.py rename to usecases/sai-ptf/conftest.py index 0af18404..be11ebec 100644 --- a/usecases/sai-ptf/ptf-conftest.py +++ b/usecases/sai-ptf/conftest.py @@ -1,18 +1,42 @@ import sys import pytest +import subprocess from saichallenger.common.sai_testbed import SaiTestbedMeta +sys.path.insert(0, '/sai-challenger/ptf/src') -def import_base_modules(): - sys.path.insert(0, '/sai-challenger/ptf/src') -import_base_modules() +@pytest.hookimpl(tryfirst=True) +def pytest_sessionstart(session): + patch_file = "/sai-challenger/usecases/sai-ptf/patches/0001-sai-base-test.patch" + target_directory = "/sai-challenger/usecases/sai-ptf/SAI/" + + try: + command = ["patch", "--dry-run", "--silent", "-N", "-p1", "-i", patch_file, "-d", target_directory] + result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) + if result.returncode == 0: + subprocess.run(["patch", "-p1", "-i", patch_file, "-d", target_directory], check=True) + elif result.returncode == 1: + # The patch is already applied + return + else: + raise RuntimeError(f"Failed to check whether the patch is already applied: {result}") + except Exception as e: + raise RuntimeError(f"Failed to apply the patch: {e}") @pytest.fixture(scope="session", autouse=True) def set_ptf_params(request): if request.config.option.testbed: tb_params = SaiTestbedMeta("/sai-challenger", 
request.config.option.testbed) + if tb_params.config['npu'][0]['target'] == 'saivs' and \ + tb_params.config['npu'][0]['client']['config']['ip'] in ['localhost', '127.0.0.1']: + try: + # Clean-up saiserver after previous test session + subprocess.run(["supervisorctl", "restart", "saiserver"], check=True) + except Exception as e: + raise RuntimeError(f"Failed to apply the patch: {e}") + tb_params.generate_sai_ptf_config_files() ports = to_ptf_int_list(tb_params.config['dataplane'][0]['port_groups']) else: