Remove redundant CurrentSnapshotConfig and CurrentVideoCodecs attributes
Fix the name of MaxFrameRate in the snapshot stream allocation functions.
pidarped committed Nov 13, 2024
1 parent aec7bc8 commit 31c6f24
Showing 4 changed files with 28 additions and 102 deletions.
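
The rename only affects the snapshot allocation path: the command handler now forwards the field as maxFrameRate instead of frameRate. The standalone C++ sketch below shows the shape of that call with placeholder types; the enum, struct, and delegate signature here are simplified assumptions for illustration, not the SDK's generated definitions.

#include <cstdint>

// Placeholder stand-ins for the generated Matter types; these are simplified
// assumptions for illustration only, not the SDK definitions.
enum class ImageCodecEnum : uint8_t { kJPEG = 0 };
enum class Status : uint8_t { Success = 0, Failure = 1 };

struct VideoResolutionStruct
{
    uint16_t width;
    uint16_t height;
};

// Hypothetical allocation hook mirroring the call made in HandleSnapshotStreamAllocate:
// the second argument is now named maxFrameRate (it was frameRate before this commit).
Status SnapshotStreamAllocate(ImageCodecEnum imageCodec, uint16_t maxFrameRate, uint32_t bitRate,
                              const VideoResolutionStruct & minResolution,
                              const VideoResolutionStruct & maxResolution, uint8_t quality)
{
    // A real delegate would pick or create a stream satisfying these constraints;
    // the sketch only sanity-checks the bounds to stay self-contained.
    if (maxFrameRate == 0 || minResolution.width > maxResolution.width ||
        minResolution.height > maxResolution.height)
    {
        return Status::Failure;
    }
    (void) imageCodec;
    (void) bitRate;
    (void) quality;
    return Status::Success;
}

int main()
{
    VideoResolutionStruct minRes{ 320, 240 };
    VideoResolutionStruct maxRes{ 1920, 1080 };

    // The renamed parameter sits in the second position, as the server now passes it.
    Status status = SnapshotStreamAllocate(ImageCodecEnum::kJPEG, /* maxFrameRate */ 30,
                                           2000000, minRes, maxRes, 90);
    return status == Status::Success ? 0 : 1;
}
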
@@ -42,18 +42,19 @@ namespace CameraAVStreamMgmt {

CameraAVStreamMgmtServer::CameraAVStreamMgmtServer(CameraAVStreamMgmtDelegate & aDelegate, EndpointId aEndpointId,
ClusterId aClusterId, BitMask<Feature> aFeature,
OptionalAttributes aOptionalAttrs,
uint8_t aMaxConcurrentVideoEncoders, uint32_t aMaxEncodedPixelRate,
OptionalAttributes aOptionalAttrs, uint8_t aMaxConcurrentVideoEncoders,
uint32_t aMaxEncodedPixelRate,
const VideoSensorParamsStruct & aVideoSensorParams, bool aNightVisionCapable,
const VideoResolutionStruct & aMinViewPort, uint32_t aMaxContentBufferSize,
const AudioCapabilitiesStruct & aMicrophoneCapabilities,
const AudioCapabilitiesStruct & aSpkrCapabilities,
TwoWayTalkSupportTypeEnum aTwoWayTalkSupport, uint32_t aMaxNetworkBandwidth) :
CommandHandlerInterface(MakeOptional(aEndpointId), aClusterId),
AttributeAccessInterface(MakeOptional(aEndpointId), aClusterId), mDelegate(aDelegate), mEndpointId(aEndpointId),
mClusterId(aClusterId), mFeature(aFeature), mOptionalAttrs(aOptionalAttrs), mMaxConcurrentVideoEncoders(aMaxConcurrentVideoEncoders),
mMaxEncodedPixelRate(aMaxEncodedPixelRate), mVideoSensorParams(aVideoSensorParams), mNightVisionCapable(aNightVisionCapable),
mMinViewPort(aMinViewPort), mMaxContentBufferSize(aMaxContentBufferSize), mMicrophoneCapabilities(aMicrophoneCapabilities),
mClusterId(aClusterId), mFeature(aFeature), mOptionalAttrs(aOptionalAttrs),
mMaxConcurrentVideoEncoders(aMaxConcurrentVideoEncoders), mMaxEncodedPixelRate(aMaxEncodedPixelRate),
mVideoSensorParams(aVideoSensorParams), mNightVisionCapable(aNightVisionCapable), mMinViewPort(aMinViewPort),
mMaxContentBufferSize(aMaxContentBufferSize), mMicrophoneCapabilities(aMicrophoneCapabilities),
mSpeakerCapabilities(aSpeakerCapabilities), mTwoWayTalkSupport(aTwoWayTalkSupport), mMaxNetworkBandwidth(aMaxNetworkBandwidth)
{
mDelegate.SetCameraAVStreamMgmtServer(this);
@@ -154,36 +155,6 @@ CHIP_ERROR CameraAVStreamMgmtServer::ReadAndEncodeSupportedSnapshotParams(const
return err;
}

CHIP_ERROR CameraAVStreamMgmtServer::ReadAndEncodeCurrentVideoCodecs(const AttributeValueEncoder::ListEncodeHelper & encoder)
{
CHIP_ERROR err = CHIP_NO_ERROR;

// Tell the delegate the read is starting..
ReturnErrorOnFailure(mDelegate.StartCurrentVideoCodecsRead());

for (uint8_t i = 0; true; i++)
{
VideoCodecEnum videoCodec;

err = mDelegate.GetCurrentVideoCodecByIndex(i, videoCodec);
SuccessOrExit(err);

err = encoder.Encode(videoCodec);
SuccessOrExit(err);
}

exit:
if (err == CHIP_ERROR_PROVIDER_LIST_EXHAUSTED)
{
// Convert end of list to CHIP_NO_ERROR
err = CHIP_NO_ERROR;
}

// Tell the delegate the read is complete
err = mDelegate.EndCurrentVideoCodecsRead();
return err;
}

CHIP_ERROR CameraAVStreamMgmtServer::ReadAndEncodeFabricsUsingCamera(const AttributeValueEncoder::ListEncodeHelper & encoder)
{
CHIP_ERROR err = CHIP_NO_ERROR;
@@ -415,52 +386,46 @@ CHIP_ERROR CameraAVStreamMgmtServer::Read(const ConcreteReadAttributePath & aPat
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get HDRModeEnabled, feature is not supported");
ReturnErrorOnFailure(aEncoder.Encode(mHDRModeEnabled));
break;
case CurrentVideoCodecs::Id:
VerifyOrReturnError(HasFeature(Feature::kVideo), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get CurrentVideoCodecs, feature is not supported"));
ReturnErrorOnFailure(aEncoder.EncodeList(
[this](const auto & encoder) -> CHIP_ERROR { return this->ReadAndEncodeCurrentVideoCodecs(encoder); }));
break;
case CurrentSnapshotConfig::Id:
VerifyOrReturnError(HasFeature(Feature::kSnapshot), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get CurrentSnapshotConfig, feature is not supported"));
ReturnErrorOnFailure(aEncoder.Encode(mCurrentSnapshotConfig));
break;
case FabricsUsingCamera::Id:
VerifyOrReturnError(HasFeature(Feature::kVideo), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get FabricsUsingCamera, feature is not supported"));
ReturnErrorOnFailure(aEncoder.EncodeList(
[this](const auto & encoder) -> CHIP_ERROR { return this->ReadAndEncodeFabricsUsingCamera(encoder); }));
[this](const auto & encoder) -> CHIP_ERROR {
return this->ReadAndEncodeFabricsUsingCamera(encoder); }));
break;
case AllocatedVideoStreams::Id:
VerifyOrReturnError(HasFeature(Feature::kVideo), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get AllocatedVideoStreams, feature is not supported"));

ReturnErrorOnFailure(aEncoder.EncodeList(
[this](const auto & encoder) -> CHIP_ERROR { return this->ReadAndEncodeAllocatedVideoStreams(encoder); }));
[this](const auto & encoder) -> CHIP_ERROR {
return this->ReadAndEncodeAllocatedVideoStreams(encoder); }));
break;
case AllocatedAudioStreams::Id:
VerifyOrReturnError(HasFeature(Feature::kAudio), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get AllocatedAudioStreams, feature is not supported"));

ReturnErrorOnFailure(aEncoder.EncodeList(
[this](const auto & encoder) -> CHIP_ERROR { return this->ReadAndEncodeAllocatedAudioStreams(encoder); }));
[this](const auto & encoder) -> CHIP_ERROR {
return this->ReadAndEncodeAllocatedAudioStreams(encoder); }));
break;
case AllocatedSnapshotStreams::Id:
VerifyOrReturnError(
HasFeature(Feature::kAudio), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get AllocatedSnapshotStreams, feature is not supported"));

ReturnErrorOnFailure(aEncoder.EncodeList(
[this](const auto & encoder) -> CHIP_ERROR { return this->ReadAndEncodeAllocatedSnapshotStreams(encoder); }));
[this](const auto & encoder) -> CHIP_ERROR {
return this->ReadAndEncodeAllocatedSnapshotStreams(encoder); }));
break;
case RankedVideoStreamPrioritiesList::Id:
VerifyOrReturnError(
HasFeature(Feature::kVideo), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not get RankedVideoStreamPrioritiesList, feature is not supported"));

ReturnErrorOnFailure(aEncoder.EncodeList(
[this](const auto & encoder) -> CHIP_ERROR { return this->ReadAndEncodeRankedVideoStreamPrioritiesList(encoder); }));
[this](const auto & encoder) -> CHIP_ERROR {
return this->ReadAndEncodeRankedVideoStreamPrioritiesList(encoder); }));
break;
case SoftRecordingPrivacyModeEnabled::Id:
VerifyOrReturnError(
@@ -604,7 +569,7 @@ CHIP_ERROR CameraAVStreamMgmtServer::Write(const ConcreteDataAttributePath & aPa
ReturnErrorOnFailure(SetHDRModeEnabled(hdrModeEnabled));
return CHIP_NO_ERROR;
}
case RankedVideoStreamPrioritiesList::Id: {// TODO
case RankedVideoStreamPrioritiesList::Id: { // TODO
VerifyOrReturnError(
HasFeature(Feature::kVideo), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not set RankedVideoStreamPrioritiesList, feature is not supported"));
@@ -635,7 +600,8 @@ CHIP_ERROR CameraAVStreamMgmtServer::Write(const ConcreteDataAttributePath & aPa
}
case NightVision::Id: {
VerifyOrReturnError((HasFeature(Feature::kVideo) || HasFeature(Feature::kSnapshot)) &&
SupportsOptAttr(OptionalAttributes::kSupportsNightVision), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
SupportsOptAttr(OptionalAttributes::kSupportsNightVision),
CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not set NightVision, feature is not supported"));

TriStateAutoEnum nightVision;
@@ -645,7 +611,8 @@ CHIP_ERROR CameraAVStreamMgmtServer::Write(const ConcreteDataAttributePath & aPa
}
case NightVisionIllum::Id: {
VerifyOrReturnError((HasFeature(Feature::kVideo) || HasFeature(Feature::kSnapshot)) &&
SupportsOptAttr(OptionalAttributes::kSupportsNightVisionIllum), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
SupportsOptAttr(OptionalAttributes::kSupportsNightVisionIllum),
CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not set NightVisionIllumination, feature is not supported"));

TriStateAutoEnum nightVisionIllum;
@@ -657,7 +624,7 @@ CHIP_ERROR CameraAVStreamMgmtServer::Write(const ConcreteDataAttributePath & aPa
VerifyOrReturnError(
HasFeature(Feature::kVideo), CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE,
ChipLogError(Zcl, "CameraAVStreamMgmt: can not set RankedVideoStreamPrioritiesList, feature is not supported"));
ViewportStruct viewPort;
ViewportStruct viewPort;
ReturnErrorOnFailure(aDecoder.Decode(viewPort));
ReturnErrorOnFailure(SetViewport(viewPort));
return CHIP_NO_ERROR;
@@ -770,34 +737,6 @@ Status CameraAVStreamMgmtServer::SetHDRModeEnabled(bool aHDRModeEnabled)
return Protocols::InteractionModel::Status::Success;
}

Status CameraAVStreamMgmtServer::SetCurrentSnapshotConfig(const VideoResolutionStruct & aVideoResolution, uint16_t aMaxFrameRate,
ImageCodecEnum aImageCodecEnum)
{
bool snapshotConfigChanged = false;
if (mCurrentSnapshotConfig.Resolution != aVideoResolution)
{
mCurrentSnapshotConfig.Resolution = aVideoResolution;
snapshotConfigChanged = true;
}
if (mCurrentSnapshotConfig.MaxFrameRate != aMaxFrameRate)
{
mCurrentSnapshotConfig.MaxFrameRate = aMaxFrameRate;
snapshotConfigChanged = true;
}
if (mCurrentSnapshotConfig.ImageCodec != aImageCodecEnum)
{
mCurrentSnapshotConfig.ImageCodec = aImageCodecEnum;
snapshotConfigChanged = true;
}
if (snapshotConfigChanged)
{
ConcreteAttributePath path = ConcreteAttributePath(mEndpointId, mClusterId, Attributes::CurrentSnapshotConfig::Id);
MatterReportingAttributeChangeCallback(path);
}

return Protocols::InteractionModel::Status::Success;
}

Status CameraAVStreamMgmtServer::SetSoftRecordingPrivacyModeEnabled(bool aSoftRecordingPrivacyModeEnabled)
{
if (mSoftRecordingPrivacyModeEnabled != aSoftRecordingPrivacyModeEnabled)
@@ -1272,7 +1211,6 @@ void CameraAVStreamMgmtServer::InvokeCommand(HandlerContext & handlerContext)
[this](HandlerContext & ctx, const auto & commandData) { HandleCaptureSnapshot(ctx, commandData); });
}
return;

}
}

@@ -1392,14 +1330,14 @@ void CameraAVStreamMgmtServer::HandleSnapshotStreamAllocate(HandlerContext & ctx

Commands::SnapshotStreamAllocateResponse::Type response;
auto & imageCodec = commandData.imageCodec;
auto & frameRate = commandData.frameRate;
auto & maxFrameRate = commandData.maxFrameRate;
auto & bitRate = commandData.bitRate;
auto & minResolution = commandData.minResolution;
auto & maxResolution = commandData.maxResolution;
auto & quality = commandData.quality;

// Call the delegate
Status status = mDelegate.SnapshotStreamAllocate(imageCodec, frameRate, bitRate, minResolution, maxResolution, quality);
Status status = mDelegate.SnapshotStreamAllocate(imageCodec, maxFrameRate, bitRate, minResolution, maxResolution, quality);

if (status != Status::Success)
{
@@ -128,10 +128,6 @@ class CameraAVStreamMgmtDelegate
virtual CHIP_ERROR GetSupportedSnapshotParamByIndex(uint8_t, Structs::RateDistortionTradeOffPointsStruct::Type &) = 0;
virtual CHIP_ERROR EndSupportedSnapshotParamsRead() = 0;

virtual CHIP_ERROR StartCurrentVideoCodecsRead() = 0;
virtual CHIP_ERROR GetCurrentVideoCodecByIndex(uint8_t, VideoCodecEnumType) = 0;
virtual CHIP_ERROR EndCurrentVideoCodecsRead() = 0;

virtual CHIP_ERROR StartFabricsUsingCameraRead() = 0;
virtual CHIP_ERROR GetFabricUsingCameraByIndex(uint8_t, chip::FabricIndex) = 0;
virtual CHIP_ERROR EndFabricsUsingCameraRead() = 0;
@@ -183,8 +179,9 @@ class CameraAVStreamMgmtServer : public CommandHandlerInterface, public Attribut
* @param aFeature The bitmask value that identifies which features are supported by this instance.
*/
CameraAVStreamMgmtServer(CameraAVStreamMgmtDelegate * aDelegate, EndpointId aEndpointId, ClusterId aClusterId,
BitMask<CameraAVStreamMgmt::Feature> aFeature, OptionalAttributes aOptionalAttrs, uint8_t aMaxConVideoEncoders,
uint32_t aMaxEncodedPixelRate, VideoSensorParamsStruct aVideoSensorParams, bool aNightVisionCapable,
BitMask<CameraAVStreamMgmt::Feature> aFeature, OptionalAttributes aOptionalAttrs,
uint8_t aMaxConVideoEncoders, uint32_t aMaxEncodedPixelRate,
VideoSensorParamsStruct aVideoSensorParams, bool aNightVisionCapable,
VideoResolutionStruct minViewPort, uint32_t aMaxContentBufferSize,
AudioCapabilitiesStruct aMicCapabilities, AudioCapabilitiesStruct aSpkrCapabilities,
TwoWayTalkSupportTypeEnum aTwoWayTalkSupport, uint32_t aMaxNetworkBandwidth);
@@ -210,10 +207,6 @@ class CameraAVStreamMgmtServer : public CommandHandlerInterface, public Attribut

Protocols::InteractionModel::Status SetHDRModeEnabled(bool aHDRModeEnabled);

Protocols::InteractionModel::Status
CameraAVStreamMgmtServer::SetCurrentSnapshotConfig(const VideoResolutionStruct & aVideoResolution, uint16_t aMaxFrameRate,
ImageCodecEnumType aImageCodecEnum);

Protocols::InteractionModel::Status SetSoftRecordingPrivacyModeEnabled(bool aSoftRecordingPrivacyModeEnabled);

Protocols::InteractionModel::Status SetSoftLivestreamPrivacyModeEnabled(bool aSoftLivestreamPrivacyModeEnabled);
@@ -278,8 +271,7 @@ class CameraAVStreamMgmtServer : public CommandHandlerInterface, public Attribut
const uint32_t mMaxNetworkBandwidth;

uint16_t mCurrentFrameRate;
bool mHDRModeEnabled = false;
SnapshotParamsStruct mCurrentSnapshotConfig;
bool mHDRModeEnabled = false;
bool mSoftRecordingPrivacyModeEnabled = false;
bool mSoftLivestreamPrivacyModeEnabled = false;
bool mHardPrivacyModeOn = false;
@@ -329,7 +321,6 @@ class CameraAVStreamMgmtServer : public CommandHandlerInterface, public Attribut
// Helpers to read list items via delegate APIs
CHIP_ERROR ReadAndEncodeRateDistortionTradeOffPoints(const AttributeValueEncoder::ListEncodeHelper & encoder);
CHIP_ERROR ReadAndEncodeSupportedSnapshotParams(const AttributeValueEncoder::ListEncodeHelper & encoder);
CHIP_ERROR ReadAndEncodeCurrentVideoCodecs(const AttributeValueEncoder::ListEncodeHelper & encoder);
CHIP_ERROR ReadAndEncodeFabricsUsingCamera(const AttributeValueEncoder::ListEncodeHelper & encoder);

CHIP_ERROR ReadAndEncodeAllocatedVideoStreams(const AttributeValueEncoder::ListEncodeHelper & encoder);
@@ -358,7 +349,6 @@ class CameraAVStreamMgmtServer : public CommandHandlerInterface, public Attribut
void HandleSnapshotStreamDeallocate(HandlerContext & ctx, const Commands::VideoStreamModify::DecodableType & req);

void HandleCaptureSnapshot(HandlerContext & ctx, const Commands::VideoStreamModify::DecodableType & req);

};

} // namespace CameraAVStreamMgmt
1 change: 0 additions & 1 deletion src/app/zap-templates/zcl/zcl-with-test-extensions.json
@@ -688,7 +688,6 @@
"MaxNetworkBandwidth",
"CurrentFrameRate",
"HDRModeEnabled",
"CurrentVideoCodecs",
"FabricsUsingCamera",
"AllocatedVideoStreams",
"AllocatedAudioStreams",
1 change: 0 additions & 1 deletion src/app/zap-templates/zcl/zcl.json
@@ -682,7 +682,6 @@
"MaxNetworkBandwidth",
"CurrentFrameRate",
"HDRModeEnabled",
"CurrentVideoCodecs",
"FabricsUsingCamera",
"AllocatedVideoStreams",
"AllocatedAudioStreams",
