diff --git a/fixlib/fixlib/baseresources.py b/fixlib/fixlib/baseresources.py
index d22f707862..e6f42e1713 100644
--- a/fixlib/fixlib/baseresources.py
+++ b/fixlib/fixlib/baseresources.py
@@ -1088,6 +1088,16 @@ class BaseBucket(BaseResource):
_metadata: ClassVar[Dict[str, Any]] = {"icon": "bucket", "group": "storage"}
_categories: ClassVar[List[Category]] = [Category.storage]
+ encryption_enabled: Optional[bool] = None
+ versioning_enabled: Optional[bool] = None
+
+
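+# Provider-agnostic queue flavor (standard vs. FIFO), exposed via BaseQueue.queue_type.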
+@unique
+class QueueType(Enum):
+ kind: ClassVar[str] = "queue_type"
+ STANDARD = "standard"
+ FIFO = "fifo"
+
@define(eq=False, slots=False)
class BaseQueue(BaseResource):
@@ -1096,6 +1106,9 @@ class BaseQueue(BaseResource):
_kind_description: ClassVar[str] = "A storage queue."
_metadata: ClassVar[Dict[str, Any]] = {"icon": "queue", "group": "storage"}
_categories: ClassVar[List[Category]] = [Category.storage]
+ queue_type: Optional[QueueType] = None
+ approximate_message_count: Optional[int] = None
+ message_retention_period: Optional[int] = None
@define(eq=False, slots=False)
@@ -1124,6 +1137,8 @@ class BaseServerlessFunction(BaseResource):
_metadata: ClassVar[Dict[str, Any]] = {"icon": "function", "group": "compute"}
_categories: ClassVar[List[Category]] = [Category.compute]
+ memory_size: Optional[int] = None
+
@define(eq=False, slots=False)
class BaseNetwork(BaseResource):
@@ -1133,6 +1148,8 @@ class BaseNetwork(BaseResource):
_metadata: ClassVar[Dict[str, Any]] = {"icon": "network", "group": "networking"}
_categories: ClassVar[List[Category]] = [Category.networking]
+ cidr_blocks: List[str] = field(factory=list)
+
@define(eq=False, slots=False)
class BaseNetworkQuota(BaseQuota):
@@ -1214,6 +1231,8 @@ class BaseSubnet(BaseResource):
_metadata: ClassVar[Dict[str, Any]] = {"icon": "subnet", "group": "networking"}
_categories: ClassVar[List[Category]] = [Category.networking]
+ cidr_block: Optional[str] = None
+
@define(eq=False, slots=False)
class BaseGateway(BaseResource):
@@ -1373,8 +1392,8 @@ class BaseAccessKey(BaseResource):
_kind_display: ClassVar[str] = "Access Key"
_kind_description: ClassVar[str] = "An access key."
_metadata: ClassVar[Dict[str, Any]] = {"icon": "key", "group": "access_control"}
- access_key_status: str = ""
_categories: ClassVar[List[Category]] = [Category.access_control, Category.security]
+ access_key_status: Optional[str] = None
@define(eq=False, slots=False)
@@ -1403,10 +1422,10 @@ class BaseStack(BaseResource):
_kind_display: ClassVar[str] = "Stack"
_kind_description: ClassVar[str] = "A stack."
_metadata: ClassVar[Dict[str, Any]] = {"icon": "stack", "group": "management"}
+ _categories: ClassVar[List[Category]] = [Category.devops, Category.management]
stack_status: str = ""
stack_status_reason: str = ""
stack_parameters: Dict[str, str] = field(factory=dict)
- _categories: ClassVar[List[Category]] = [Category.devops, Category.management]
@define(eq=False, slots=False)
@@ -1452,6 +1471,7 @@ class BaseDNSZone(BaseResource):
_kind_description: ClassVar[str] = "A DNS zone."
_metadata: ClassVar[Dict[str, Any]] = {"icon": "dns", "group": "networking"}
_categories: ClassVar[List[Category]] = [Category.dns, Category.networking]
+ private_zone: Optional[bool] = None
@define(eq=False, slots=False)
@@ -1573,6 +1593,19 @@ class BaseManagedKubernetesClusterProvider(BaseResource):
endpoint: Optional[str] = field(default=None, metadata={"description": "The kubernetes API endpoint"})
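+# Normalized AI job lifecycle states shared across cloud providers (Bedrock, Azure ML, Vertex AI map onto these).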
+class AIJobStatus(Enum):
+ PENDING = "pending"
+ PREPARING = "preparing"
+ RUNNING = "running"
+ STOPPING = "stopping"
+ STOPPED = "stopped"
+ COMPLETED = "completed"
+ FAILED = "failed"
+ CANCELLED = "cancelled"
+ PAUSED = "paused"
+ UNKNOWN = "unknown"
+
+
@define(eq=False, slots=False)
class BaseAIResource(BaseResource):
kind: ClassVar[str] = "ai_resource"
@@ -1589,6 +1622,8 @@ class BaseAIJob(BaseAIResource):
_kind_description: ClassVar[str] = "An AI Job resource."
_metadata: ClassVar[Dict[str, Any]] = {"icon": "job", "group": "ai"}
+ ai_job_status: Optional[AIJobStatus] = field(default=None, metadata={"description": "Current status of the AI job"})
+
@define(eq=False, slots=False)
class BaseAIModel(BaseAIResource):
diff --git a/plugins/aws/fix_plugin_aws/resource/acm.py b/plugins/aws/fix_plugin_aws/resource/acm.py
index 8a12298791..9b7e4c0115 100644
--- a/plugins/aws/fix_plugin_aws/resource/acm.py
+++ b/plugins/aws/fix_plugin_aws/resource/acm.py
@@ -80,6 +80,7 @@ class AwsAcmCertificate(AwsResource, BaseCertificate):
"tags": S("Tags", default=[]) >> ToDict(),
"name": S("DomainName"),
"ctime": S("CreatedAt"),
+ "mtime": S("RenewalSummary", "UpdatedAt"),
"arn": S("CertificateArn"),
"subject_alternative_names": S("SubjectAlternativeNames", default=[]),
"domain_validation_options": S("DomainValidationOptions", default=[])
diff --git a/plugins/aws/fix_plugin_aws/resource/bedrock.py b/plugins/aws/fix_plugin_aws/resource/bedrock.py
index d4aecec800..c8e5ba4fff 100644
--- a/plugins/aws/fix_plugin_aws/resource/bedrock.py
+++ b/plugins/aws/fix_plugin_aws/resource/bedrock.py
@@ -12,9 +12,9 @@
from fix_plugin_aws.resource.lambda_ import AwsLambdaFunction
from fix_plugin_aws.resource.s3 import AwsS3Bucket
from fix_plugin_aws.resource.rds import AwsRdsCluster, AwsRdsInstance
-from fixlib.baseresources import BaseAIJob, ModelReference, BaseAIModel
+from fixlib.baseresources import AIJobStatus, BaseAIJob, ModelReference, BaseAIModel
from fixlib.graph import Graph
-from fixlib.json_bender import Bender, S, ForallBend, Bend, Sort
+from fixlib.json_bender import Bender, S, ForallBend, Bend, MapEnum, Sort
from fixlib.types import Json
log = logging.getLogger("fix.plugins.aws")
@@ -82,6 +82,16 @@ def service_name(cls) -> str:
return service_name
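+# Map AWS Bedrock job status strings onto the shared AIJobStatus enum; MapEnum falls back to AIJobStatus.UNKNOWN for unmapped values.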
+AWS_BEDROCK_JOB_STATUS_MAPPING = {
+ "InProgress": AIJobStatus.RUNNING,
+ "Completed": AIJobStatus.COMPLETED,
+ "Failed": AIJobStatus.FAILED,
+ "Stopping": AIJobStatus.STOPPING,
+ "Stopped": AIJobStatus.STOPPED,
+ "Deleting": AIJobStatus.STOPPING,
+}
+
+
@define(eq=False, slots=False)
class AwsBedrockFoundationModel(BaseAIModel, AwsResource):
kind: ClassVar[str] = "aws_bedrock_foundation_model"
@@ -553,7 +563,7 @@ class AwsBedrockModelCustomizationJob(BedrockTaggable, BaseAIJob, AwsResource):
"output_model_arn": S("outputModelArn"),
"client_request_token": S("clientRequestToken"),
"role_arn": S("roleArn"),
- "status": S("status"),
+ "status": S("status") >> MapEnum(AWS_BEDROCK_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"failure_message": S("failureMessage"),
"creation_time": S("creationTime"),
"last_modified_time": S("lastModifiedTime"),
@@ -575,7 +585,6 @@ class AwsBedrockModelCustomizationJob(BedrockTaggable, BaseAIJob, AwsResource):
output_model_arn: Optional[str] = field(default=None, metadata={"description": "The Amazon Resource Name (ARN) of the output model."}) # fmt: skip
client_request_token: Optional[str] = field(default=None, metadata={"description": "The token that you specified in the CreateCustomizationJob request."}) # fmt: skip
role_arn: Optional[str] = field(default=None, metadata={"description": "The Amazon Resource Name (ARN) of the IAM role."}) # fmt: skip
- status: Optional[str] = field(default=None, metadata={"description": "The status of the job. A successful job transitions from in-progress to completed when the output model is ready to use. If the job failed, the failure message contains information about why the job failed."}) # fmt: skip
failure_message: Optional[str] = field(default=None, metadata={"description": "Information about why the job failed."}) # fmt: skip
creation_time: Optional[datetime] = field(default=None, metadata={"description": "Time that the resource was created."}) # fmt: skip
last_modified_time: Optional[datetime] = field(default=None, metadata={"description": "Time that the resource was last modified."}) # fmt: skip
@@ -777,7 +786,7 @@ class AwsBedrockEvaluationJob(BedrockTaggable, BaseAIJob, AwsResource):
"ctime": S("creationTime"),
"mtime": S("lastModifiedTime"),
"job_name": S("jobName"),
- "status": S("status"),
+ "status": S("status") >> MapEnum(AWS_BEDROCK_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"job_arn": S("jobArn"),
"job_description": S("jobDescription"),
"role_arn": S("roleArn"),
@@ -791,7 +800,6 @@ class AwsBedrockEvaluationJob(BedrockTaggable, BaseAIJob, AwsResource):
"failure_messages": S("failureMessages", default=[]),
}
job_name: Optional[str] = field(default=None, metadata={"description": "The name of the model evaluation job."}) # fmt: skip
- status: Optional[str] = field(default=None, metadata={"description": "The status of the model evaluation job."}) # fmt: skip
job_arn: Optional[str] = field(default=None, metadata={"description": "The Amazon Resource Name (ARN) of the model evaluation job."}) # fmt: skip
job_description: Optional[str] = field(default=None, metadata={"description": "The description of the model evaluation job."}) # fmt: skip
role_arn: Optional[str] = field(default=None, metadata={"description": "The Amazon Resource Name (ARN) of the IAM service role used in the model evaluation job."}) # fmt: skip
diff --git a/plugins/aws/fix_plugin_aws/resource/cognito.py b/plugins/aws/fix_plugin_aws/resource/cognito.py
index c2cdd3e0ae..ebc39fbdbd 100644
--- a/plugins/aws/fix_plugin_aws/resource/cognito.py
+++ b/plugins/aws/fix_plugin_aws/resource/cognito.py
@@ -113,6 +113,7 @@ class AwsCognitoUser(AwsResource, BaseUser):
"enabled": S("Enabled"),
"user_status": S("UserStatus"),
"mfa_options": S("MFAOptions", default=[]) >> ForallBend(AwsCognitoMFAOptionType.mapping),
+ "username": S("Username"),
}
user_attributes: List[AwsCognitoAttributeType] = field(factory=list)
enabled: Optional[bool] = field(default=None)
diff --git a/plugins/aws/fix_plugin_aws/resource/dynamodb.py b/plugins/aws/fix_plugin_aws/resource/dynamodb.py
index c068aaf921..92199b8c2b 100644
--- a/plugins/aws/fix_plugin_aws/resource/dynamodb.py
+++ b/plugins/aws/fix_plugin_aws/resource/dynamodb.py
@@ -8,9 +8,16 @@
from fix_plugin_aws.resource.kinesis import AwsKinesisStream
from fix_plugin_aws.resource.kms import AwsKmsKey
from fix_plugin_aws.utils import ToDict
-from fixlib.baseresources import HasResourcePolicy, ModelReference, PolicySource, PolicySourceKind
+from fixlib.baseresources import (
+ BaseDatabase,
+ DatabaseInstanceStatus,
+ HasResourcePolicy,
+ ModelReference,
+ PolicySource,
+ PolicySourceKind,
+)
from fixlib.graph import Graph
-from fixlib.json_bender import S, Bend, Bender, ForallBend, bend
+from fixlib.json_bender import S, Bend, Bender, ForallBend, bend, K, MapValue
from fixlib.types import Json
from fixlib.json import sort_json
@@ -356,7 +363,7 @@ class AwsDynamoDbContinuousBackup:
@define(eq=False, slots=False)
-class AwsDynamoDbTable(DynamoDbTaggable, AwsResource, HasResourcePolicy):
+class AwsDynamoDbTable(DynamoDbTaggable, BaseDatabase, AwsResource, HasResourcePolicy):
kind: ClassVar[str] = "aws_dynamodb_table"
_kind_display: ClassVar[str] = "AWS DynamoDB Table"
_kind_description: ClassVar[str] = "AWS DynamoDB Table is a fully managed NoSQL database service that stores and retrieves data. It supports key-value and document data models, offering automatic scaling and low-latency performance. DynamoDB Tables handle data storage, indexing, and querying, providing consistent read and write throughput. They offer data encryption, backup, and recovery features for secure and reliable data management." # fmt: skip
@@ -396,6 +403,25 @@ class AwsDynamoDbTable(DynamoDbTaggable, AwsResource, HasResourcePolicy):
"dynamodb_sse_description": S("SSEDescription") >> Bend(AwsDynamoDbSSEDescription.mapping),
"dynamodb_archival_summary": S("ArchivalSummary") >> Bend(AwsDynamoDbArchivalSummary.mapping),
"dynamodb_table_class_summary": S("TableClassSummary") >> Bend(AwsDynamoDbTableClassSummary.mapping),
+ "db_type": K("dynamodb"),
+ "db_status": S("TableStatus")
+ >> MapValue(
+ {
+ "CREATING": DatabaseInstanceStatus.BUSY,
+ "UPDATING": DatabaseInstanceStatus.BUSY,
+ "DELETING": DatabaseInstanceStatus.BUSY,
+ "ACTIVE": DatabaseInstanceStatus.AVAILABLE,
+ "INACCESSIBLE_ENCRYPTION_CREDENTIALS": DatabaseInstanceStatus.FAILED,
+ "ARCHIVING": DatabaseInstanceStatus.BUSY,
+ "ARCHIVED": DatabaseInstanceStatus.STOPPED,
+ },
+ default=DatabaseInstanceStatus.UNKNOWN,
+ ),
+ "volume_encrypted": S("SSEDescription", "Status")
+ >> MapValue(
+ {"ENABLING": True, "ENABLED": True, "DISABLING": False, "DISABLED": False, "UPDATING": None},
+ default=None,
+ ),
}
arn: Optional[str] = field(default=None)
dynamodb_attribute_definitions: List[AwsDynamoDbAttributeDefinition] = field(factory=list)
diff --git a/plugins/aws/fix_plugin_aws/resource/ec2.py b/plugins/aws/fix_plugin_aws/resource/ec2.py
index 1a2209dceb..2a21b2f097 100644
--- a/plugins/aws/fix_plugin_aws/resource/ec2.py
+++ b/plugins/aws/fix_plugin_aws/resource/ec2.py
@@ -23,7 +23,6 @@
from fix_plugin_aws.resource.kms import AwsKmsKey
from fix_plugin_aws.resource.s3 import AwsS3Bucket
from fix_plugin_aws.utils import ToDict, TagsValue
-from fix_plugin_aws.aws_client import AwsClient
from fixlib.baseresources import (
BaseInstance,
BaseKeyPair,
@@ -2155,6 +2154,7 @@ class AwsEc2Vpc(EC2Taggable, AwsResource, BaseNetwork):
"vpc_cidr_block_association_set": S("CidrBlockAssociationSet", default=[])
>> ForallBend(AwsEc2VpcCidrBlockAssociation.mapping),
"vpc_is_default": S("IsDefault"),
+ "cidr_blocks": S("CidrBlockAssociationSet", default=[]) >> ForallBend(S("CidrBlock")),
}
vpc_cidr_block: Optional[str] = field(default=None)
vpc_dhcp_options_id: Optional[str] = field(default=None)
@@ -2506,6 +2506,7 @@ class AwsEc2Subnet(EC2Taggable, AwsResource, BaseSubnet):
"subnet_ipv6_native": S("Ipv6Native"),
"subnet_private_dns_name_options_on_launch": S("PrivateDnsNameOptionsOnLaunch")
>> Bend(AwsEc2PrivateDnsNameOptionsOnLaunch.mapping),
+ "cidr_block": S("CidrBlock"),
}
subnet_availability_zone: Optional[str] = field(default=None)
subnet_availability_zone_id: Optional[str] = field(default=None)
diff --git a/plugins/aws/fix_plugin_aws/resource/iam.py b/plugins/aws/fix_plugin_aws/resource/iam.py
index bdf6b15a39..ced7704b64 100644
--- a/plugins/aws/fix_plugin_aws/resource/iam.py
+++ b/plugins/aws/fix_plugin_aws/resource/iam.py
@@ -654,6 +654,7 @@ class AwsIamUser(AwsResource, BaseUser, BaseIamPrincipal):
"arn": S("Arn"),
"user_policies": S("UserPolicyList", default=[]) >> ForallBend(AwsIamPolicyDetail.mapping),
"user_permissions_boundary": S("PermissionsBoundary") >> Bend(AwsIamAttachedPermissionsBoundary.mapping),
+ "username": S("UserName"),
}
path: Optional[str] = field(default=None)
user_policies: List[AwsIamPolicyDetail] = field(factory=list)
diff --git a/plugins/aws/fix_plugin_aws/resource/lambda_.py b/plugins/aws/fix_plugin_aws/resource/lambda_.py
index 599bef09ce..b5476fbe57 100644
--- a/plugins/aws/fix_plugin_aws/resource/lambda_.py
+++ b/plugins/aws/fix_plugin_aws/resource/lambda_.py
@@ -251,6 +251,7 @@ class AwsLambdaFunction(AwsResource, BaseServerlessFunction, HasResourcePolicy):
"function_signing_job_arn": S("SigningJobArn"),
"function_architectures": S("Architectures", default=[]),
"function_ephemeral_storage": S("EphemeralStorage", "Size"),
+ "memory_size": S("MemorySize"),
}
function_runtime: Optional[str] = field(default=None)
function_role: Optional[str] = field(default=None)
diff --git a/plugins/aws/fix_plugin_aws/resource/route53.py b/plugins/aws/fix_plugin_aws/resource/route53.py
index c92f2c4659..9cf65df94e 100644
--- a/plugins/aws/fix_plugin_aws/resource/route53.py
+++ b/plugins/aws/fix_plugin_aws/resource/route53.py
@@ -81,12 +81,13 @@ class AwsRoute53Zone(AwsResource, BaseDNSZone):
"name": S("Name"),
"zone_caller_reference": S("CallerReference"),
"zone_config": S("Config") >> Bend(AwsRoute53ZoneConfig.mapping),
- "zone_resource_record_set_count": S("ResourceRecordSetCount"),
"zone_linked_service": S("LinkedService") >> Bend(AwsRoute53LinkedService.mapping),
+ "private_zone": S("Config", "PrivateZone"),
+ "zone_resource_record_set_count": S("ResourceRecordSetCount"),
}
+ zone_resource_record_set_count: Optional[int] = field(default=None, metadata=dict(ignore_history=True))
zone_caller_reference: Optional[str] = field(default=None)
zone_config: Optional[AwsRoute53ZoneConfig] = field(default=None)
- zone_resource_record_set_count: Optional[int] = field(default=None, metadata=dict(ignore_history=True))
zone_linked_service: Optional[AwsRoute53LinkedService] = field(default=None)
zone_logging_config: Optional[AwsRoute53LoggingConfig] = field(default=None)
diff --git a/plugins/aws/fix_plugin_aws/resource/s3.py b/plugins/aws/fix_plugin_aws/resource/s3.py
index 180e1c3e08..4a22979eaf 100644
--- a/plugins/aws/fix_plugin_aws/resource/s3.py
+++ b/plugins/aws/fix_plugin_aws/resource/s3.py
@@ -235,6 +235,7 @@ def add_bucket_encryption(bck: AwsS3Bucket) -> None:
mapped = bend(AwsS3ServerSideEncryptionRule.mapping, raw)
if rule := parse_json(mapped, AwsS3ServerSideEncryptionRule, builder):
bck.bucket_encryption_rules.append(rule)
+ bck.encryption_enabled = len(bck.bucket_encryption_rules) > 0
def add_bucket_policy(bck: AwsS3Bucket) -> None:
with builder.suppress(f"{service_name}.get-bucket-policy"):
@@ -267,9 +268,11 @@ def add_bucket_versioning(bck: AwsS3Bucket) -> None:
):
bck.bucket_versioning = raw_versioning.get("Status") == "Enabled"
bck.bucket_mfa_delete = raw_versioning.get("MFADelete") == "Enabled"
+ bck.versioning_enabled = bck.bucket_versioning
else:
bck.bucket_versioning = False
bck.bucket_mfa_delete = False
+ bck.versioning_enabled = False
def add_public_access(bck: AwsS3Bucket) -> None:
with builder.suppress(f"{service_name}.get-public-access-block"):
diff --git a/plugins/aws/fix_plugin_aws/resource/sqs.py b/plugins/aws/fix_plugin_aws/resource/sqs.py
index ebaa8923be..dd4b87a2e0 100644
--- a/plugins/aws/fix_plugin_aws/resource/sqs.py
+++ b/plugins/aws/fix_plugin_aws/resource/sqs.py
@@ -15,6 +15,7 @@
ModelReference,
PolicySource,
PolicySourceKind,
+ QueueType,
)
from fixlib.graph import Graph
from fixlib.json_bender import F, Bender, S, AsInt, AsBool, Bend, ParseJson, Sorted
@@ -80,6 +81,8 @@ class AwsSqsQueue(AwsResource, BaseQueue, HasResourcePolicy):
"sqs_delay_seconds": S("DelaySeconds") >> AsInt(),
"sqs_receive_message_wait_time_seconds": S("ReceiveMessageWaitTimeSeconds") >> AsInt(),
"sqs_managed_sse_enabled": S("SqsManagedSseEnabled") >> AsBool(),
+ "message_retention_period": S("MessageRetentionPeriod") >> AsInt(),
+ "approximate_message_count": S("ApproximateNumberOfMessages") >> AsInt(),
}
sqs_queue_url: Optional[str] = field(default=None)
sqs_approximate_number_of_messages: Optional[int] = field(default=None, metadata=dict(ignore_history=True))
@@ -118,7 +121,7 @@ def called_collect_apis(cls) -> List[AwsApiSpec]:
]
@classmethod
- def collect(cls: Type[AwsResource], json: List[Json], builder: GraphBuilder) -> None:
+ def collect(cls, json: List[Json], builder: GraphBuilder) -> None:
def add_instance(queue_url: str) -> None:
queue_attributes = builder.client.get(
service_name, "get-queue-attributes", "Attributes", QueueUrl=queue_url, AttributeNames=["All"]
@@ -126,8 +129,9 @@ def add_instance(queue_url: str) -> None:
if queue_attributes is not None:
queue_attributes["QueueUrl"] = queue_url
queue_attributes["QueueName"] = queue_url.rsplit("/", 1)[-1]
- if instance := cls.from_api(queue_attributes, builder):
+ if instance := AwsSqsQueue.from_api(queue_attributes, builder):
builder.add_node(instance, queue_attributes)
+ instance.queue_type = QueueType.FIFO if instance.sqs_fifo_queue else QueueType.STANDARD
builder.submit_work(service_name, add_tags, instance)
def add_tags(queue: AwsSqsQueue) -> None:
diff --git a/plugins/aws/fix_plugin_aws/resource/ssm.py b/plugins/aws/fix_plugin_aws/resource/ssm.py
index a9a4d1074f..38d7446545 100644
--- a/plugins/aws/fix_plugin_aws/resource/ssm.py
+++ b/plugins/aws/fix_plugin_aws/resource/ssm.py
@@ -360,7 +360,7 @@ class AwsSSMResourceCompliance(AwsResource, PhantomBaseResource):
"ssm",
"list-resource-compliance-summaries",
"ResourceComplianceSummaryItems",
- {"Filters": [{"Key": "Status", "Values": ["COMPLIANT"], "Type": "EQUAL"}]},
+ {"Filters": [{"Key": "Status", "Values": ["NON_COMPLIANT"], "Type": "EQUAL"}]},
)
mapping: ClassVar[Dict[str, Bender]] = {
"id": S("Id"),
diff --git a/plugins/aws/test/resources/cloudfront_test.py b/plugins/aws/test/resources/cloudfront_test.py
index 8dfb163dde..fffda5a6e4 100644
--- a/plugins/aws/test/resources/cloudfront_test.py
+++ b/plugins/aws/test/resources/cloudfront_test.py
@@ -46,14 +46,14 @@ def validate_delete_args(**kwargs: Any) -> Any:
def test_functions() -> None:
- first, builder = round_trip_for(AwsCloudFrontFunction)
+ first, builder = round_trip_for(AwsCloudFrontFunction, "memory_size")
assert len(builder.resources_of(AwsCloudFrontFunction)) == 1
assert len(first.tags) == 1
assert first.arn == "arn"
def test_function_deletion() -> None:
- func, _ = round_trip_for(AwsCloudFrontFunction)
+ func, _ = round_trip_for(AwsCloudFrontFunction, "memory_size")
def validate_delete_args(**kwargs: Any) -> Any:
assert kwargs["action"] == "delete-function"
diff --git a/plugins/aws/test/resources/dynamodb_test.py b/plugins/aws/test/resources/dynamodb_test.py
index ea0b22eb86..3f39f4cc97 100644
--- a/plugins/aws/test/resources/dynamodb_test.py
+++ b/plugins/aws/test/resources/dynamodb_test.py
@@ -7,13 +7,17 @@
def test_tables() -> None:
- first, builder = round_trip_for(AwsDynamoDbTable, "dynamodb_policy")
+ first, builder = round_trip_for(
+ AwsDynamoDbTable, "dynamodb_policy", "db_version", "db_publicly_accessible", "volume_size", "volume_iops"
+ )
assert len(builder.resources_of(AwsDynamoDbTable)) == 1
assert len(first.tags) == 1
def test_tagging_tables() -> None:
- table, _ = round_trip_for(AwsDynamoDbTable, "dynamodb_policy")
+ table, _ = round_trip_for(
+ AwsDynamoDbTable, "dynamodb_policy", "db_version", "db_publicly_accessible", "volume_size", "volume_iops"
+ )
def validate_update_args(**kwargs: Any) -> Any:
if kwargs["action"] == "list-tags-of-resource":
@@ -37,7 +41,9 @@ def validate_delete_args(**kwargs: Any) -> Any:
def test_delete_tables() -> None:
- table, _ = round_trip_for(AwsDynamoDbTable, "dynamodb_policy")
+ table, _ = round_trip_for(
+ AwsDynamoDbTable, "dynamodb_policy", "db_version", "db_publicly_accessible", "volume_size", "volume_iops"
+ )
def validate_delete_args(**kwargs: Any) -> Any:
assert kwargs["action"] == "delete-table"
diff --git a/plugins/aws/test/resources/files/cloudwatch/get-metric-data__2024_05_01_12_00_00_00_00_numberofobjects_aws_s3_bucketname_bucket_1_storagetype_allstoragetypes_average_AWS_S3_NumberOfObjects_BucketName_bucket_1_StorageType_AllStorageTypes_86400_Average_Count_True_bucketsizebytes_a.json b/plugins/aws/test/resources/files/cloudwatch/get-metric-data__2020_05_30_17_45_30_numberofobjects_aws_s3_bucketname_bucket_1_storagetype_allstoragetypes_average_AWS_S3_NumberOfObjects_BucketName_bucket_1_StorageType_AllStorageTypes_86400_Average_Count_True_bucketsizebytes_aws_s3_.json
similarity index 60%
rename from plugins/aws/test/resources/files/cloudwatch/get-metric-data__2024_05_01_12_00_00_00_00_numberofobjects_aws_s3_bucketname_bucket_1_storagetype_allstoragetypes_average_AWS_S3_NumberOfObjects_BucketName_bucket_1_StorageType_AllStorageTypes_86400_Average_Count_True_bucketsizebytes_a.json
rename to plugins/aws/test/resources/files/cloudwatch/get-metric-data__2020_05_30_17_45_30_numberofobjects_aws_s3_bucketname_bucket_1_storagetype_allstoragetypes_average_AWS_S3_NumberOfObjects_BucketName_bucket_1_StorageType_AllStorageTypes_86400_Average_Count_True_bucketsizebytes_aws_s3_.json
index 11273b8167..956409f9a4 100644
--- a/plugins/aws/test/resources/files/cloudwatch/get-metric-data__2024_05_01_12_00_00_00_00_numberofobjects_aws_s3_bucketname_bucket_1_storagetype_allstoragetypes_average_AWS_S3_NumberOfObjects_BucketName_bucket_1_StorageType_AllStorageTypes_86400_Average_Count_True_bucketsizebytes_a.json
+++ b/plugins/aws/test/resources/files/cloudwatch/get-metric-data__2020_05_30_17_45_30_numberofobjects_aws_s3_bucketname_bucket_1_storagetype_allstoragetypes_average_AWS_S3_NumberOfObjects_BucketName_bucket_1_StorageType_AllStorageTypes_86400_Average_Count_True_bucketsizebytes_aws_s3_.json
@@ -3,24 +3,36 @@
{
"Id": "bucketsizebytes_aws_s3_bucketname_bucket_1_storagetype_standardstorage_average",
"Label": "BucketSizeBytes",
- "Timestamps": [ "2024-04-30T12:50:00+00:00" ],
- "Values": [ 1 ],
+ "Timestamps": [
+ "2024-04-30T12:50:00+00:00"
+ ],
+ "Values": [
+ 1
+ ],
"StatusCode": "Complete"
},
{
"Id": "bucketsizebytes_aws_s3_bucketname_bucket_1_storagetype_intelligenttieringstorage_average",
"Label": "BucketSizeBytes",
- "Timestamps": [ "2024-04-30T12:50:00+00:00" ],
- "Values": [ 2 ],
+ "Timestamps": [
+ "2024-04-30T12:50:00+00:00"
+ ],
+ "Values": [
+ 2
+ ],
"StatusCode": "Complete"
},
{
"Id": "bucketsizebytes_aws_s3_bucketname_bucket_1_storagetype_standardiastorage_average",
"Label": "BucketSizeBytes",
- "Timestamps": [ "2024-04-30T12:50:00+00:00" ],
- "Values": [ 3 ],
+ "Timestamps": [
+ "2024-04-30T12:50:00+00:00"
+ ],
+ "Values": [
+ 3
+ ],
"StatusCode": "Complete"
}
],
"Messages": []
-}
+}
\ No newline at end of file
diff --git a/plugins/aws/test/resources/files/ssm/list-resource-compliance-summaries__Status_COMPLIANT_EQUAL.json b/plugins/aws/test/resources/files/ssm/list-resource-compliance-summaries__Status_NON_COMPLIANT_EQUAL.json
similarity index 100%
rename from plugins/aws/test/resources/files/ssm/list-resource-compliance-summaries__Status_COMPLIANT_EQUAL.json
rename to plugins/aws/test/resources/files/ssm/list-resource-compliance-summaries__Status_NON_COMPLIANT_EQUAL.json
diff --git a/plugins/aws/test/resources/s3_test.py b/plugins/aws/test/resources/s3_test.py
index 645a0f4c47..fd9dd264f8 100644
--- a/plugins/aws/test/resources/s3_test.py
+++ b/plugins/aws/test/resources/s3_test.py
@@ -1,9 +1,14 @@
-from fixlib.graph import Graph
-from test.resources import round_trip_for
+from concurrent.futures import ThreadPoolExecutor
+from datetime import datetime, timedelta
from types import SimpleNamespace
-from typing import cast, Any, Callable
+from typing import cast, Any, Callable, List
+from fix_plugin_aws.resource.base import AwsRegion, GraphBuilder
+from fix_plugin_aws.resource.cloudwatch import update_resource_metrics, AwsCloudwatchMetricData, AwsCloudwatchQuery
from fix_plugin_aws.aws_client import AwsClient
from fix_plugin_aws.resource.s3 import AwsS3Bucket, AwsS3AccountSettings
+from fixlib.threading import ExecutorQueue
+from fixlib.graph import Graph
+from test.resources import round_trip_for
def test_buckets() -> None:
@@ -62,14 +67,42 @@ def validate_delete_args(aws_service: str, fn: Callable[[Any], None]) -> Any:
bucket.delete_resource(client, Graph())
-# TODO: fix 'RuntimeError: cannot schedule new futures after shutdown'
-# def test_s3_usage_metrics(account_collector: AwsAccountCollector) -> None:
-# bucket, builder = round_trip_for(AwsS3Bucket)
-# builder.all_regions.update({"us-east-1": AwsRegion(id="us-east-1", name="us-east-1")})
-# account_collector.collect_usage_metrics(builder)
-# bucket.complete_graph(builder, {})
-# assert bucket._resource_usage["standard_storage_bucket_size_bytes"]["avg"] == 1.0
-# assert bucket._resource_usage["intelligent_tiering_storage_bucket_size_bytes"]["avg"] == 2.0
-# assert bucket._resource_usage["standard_ia_storage_bucket_size_bytes"]["avg"] == 3.0
-# # This values is computed internally using the other values. If the number does not match, the logic is broken!
-# assert bucket._resource_usage["bucket_size_bytes"]["avg"] == 6.0
+def test_s3_usage_metrics() -> None:
+ bucket, builder = round_trip_for(AwsS3Bucket, "bucket_lifecycle_policy")
+ builder.all_regions.update({"us-east-1": AwsRegion(id="us-east-1", name="us-east-1")})
+ queries = bucket.collect_usage_metrics(builder)
+ lookup_map = {}
+ lookup_map[bucket.id] = bucket
+
+ # simulates the `collect_usage_metrics` method found in `AwsAccountCollector`.
+ def collect_and_set_metrics(start_at: datetime, region: AwsRegion, queries: List[AwsCloudwatchQuery]) -> None:
+ with ThreadPoolExecutor(max_workers=1) as executor:
+ queue = ExecutorQueue(executor, tasks_per_key=lambda _: 1, name="test")
+ g_builder = GraphBuilder(
+ builder.graph,
+ builder.cloud,
+ builder.account,
+ region,
+ {region.id: region},
+ builder.client,
+ queue,
+ builder.core_feedback,
+ last_run_started_at=builder.last_run_started_at,
+ )
+ result = AwsCloudwatchMetricData.query_for_multiple(
+ g_builder, start_at, start_at + timedelta(hours=2), queries
+ )
+ update_resource_metrics(lookup_map, result)
+ # compute bucket_size_bytes
+ for after_collect in builder.after_collect_actions:
+ after_collect()
+
+ start = datetime(2020, 5, 30, 15, 45, 30)
+
+ collect_and_set_metrics(start, AwsRegion(id="us-east-1", name="us-east-1"), queries)
+
+ assert bucket._resource_usage["standard_storage_bucket_size_bytes"]["avg"] == 1.0
+ assert bucket._resource_usage["intelligent_tiering_storage_bucket_size_bytes"]["avg"] == 2.0
+ assert bucket._resource_usage["standard_ia_storage_bucket_size_bytes"]["avg"] == 3.0
+    # This value is computed internally from the other values. If the number does not match, the logic is broken!
+ assert bucket._resource_usage["bucket_size_bytes"]["avg"] == 6.0
diff --git a/plugins/azure/fix_plugin_azure/resource/machinelearning.py b/plugins/azure/fix_plugin_azure/resource/machinelearning.py
index 949e5f00d7..3ef4599e02 100644
--- a/plugins/azure/fix_plugin_azure/resource/machinelearning.py
+++ b/plugins/azure/fix_plugin_azure/resource/machinelearning.py
@@ -26,9 +26,16 @@
from fix_plugin_azure.resource.network import AzureNetworkSubnet, AzureNetworkVirtualNetwork
from fix_plugin_azure.resource.storage import AzureStorageAccount
from fix_plugin_azure.resource.web import AzureWebApp
-from fixlib.baseresources import BaseInstanceType, ModelReference, BaseAIJob, BaseAIModel, PhantomBaseResource
+from fixlib.baseresources import (
+ BaseInstanceType,
+ ModelReference,
+ BaseAIJob,
+ AIJobStatus,
+ BaseAIModel,
+ PhantomBaseResource,
+)
from fixlib.graph import BySearchCriteria
-from fixlib.json_bender import Bender, S, ForallBend, Bend, K
+from fixlib.json_bender import MapEnum, Bender, S, ForallBend, Bend, K
from fixlib.types import Json
log = logging.getLogger("fix.plugins.azure")
@@ -56,6 +63,24 @@ def collect(
return result
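+# Map Azure ML job status strings onto the shared AIJobStatus enum; unmapped values fall back to AIJobStatus.UNKNOWN via MapEnum.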
+AZURE_ML_JOB_STATUS_MAPPING = {
+ "CancelRequested": AIJobStatus.STOPPING,
+ "Canceled": AIJobStatus.CANCELLED,
+ "Completed": AIJobStatus.COMPLETED,
+ "Failed": AIJobStatus.FAILED,
+ "Finalizing": AIJobStatus.STOPPING,
+ "NotResponding": AIJobStatus.UNKNOWN,
+ "NotStarted": AIJobStatus.PENDING,
+ "Paused": AIJobStatus.PAUSED,
+ "Preparing": AIJobStatus.PREPARING,
+ "Provisioning": AIJobStatus.PREPARING,
+ "Queued": AIJobStatus.PENDING,
+ "Running": AIJobStatus.RUNNING,
+ "Starting": AIJobStatus.PREPARING,
+ "Unknown": AIJobStatus.UNKNOWN,
+}
+
+
@define(eq=False, slots=False)
class AzureEndpointAuthKeys:
kind: ClassVar[str] = "azure_endpoint_auth_keys"
@@ -1495,7 +1520,7 @@ class AzureMachineLearningJob(BaseAIJob, MicrosoftResource, AzureProxyResource):
"job_type": S("properties", "jobType"),
"notification_setting": S("properties", "notificationSetting") >> Bend(AzureNotificationSetting.mapping),
"services": S("properties", "services"),
- "status": S("properties", "status"),
+ "status": S("properties", "status") >> MapEnum(AZURE_ML_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"description": S("properties", "description"),
"properties": S("properties", "properties"),
}
@@ -1510,7 +1535,6 @@ class AzureMachineLearningJob(BaseAIJob, MicrosoftResource, AzureProxyResource):
job_type: Optional[str] = field(default=None, metadata={"description": "Enum to determine the type of job."})
notification_setting: Optional[AzureNotificationSetting] = field(default=None, metadata={'description': 'Configuration for notification.'}) # fmt: skip
services: Optional[Dict[str, AzureJobService]] = field(default=None, metadata={'description': 'List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject.'}) # fmt: skip
- status: Optional[str] = field(default=None, metadata={"description": "The status of a job."})
def connect_in_graph(self, builder: GraphBuilder, source: Json) -> None:
if compute_id := self.compute_id:
@@ -1660,7 +1684,7 @@ class AzureMachineLearningLabelingJob(BaseAIJob, MicrosoftResource):
"progress_metrics": S("properties", "progressMetrics") >> Bend(AzureProgressMetrics.mapping),
"job_project_id": S("properties", "projectId"),
"properties": S("properties", "properties"),
- "status": S("properties", "status"),
+ "status": S("properties", "status") >> MapEnum(AZURE_ML_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"status_messages": S("properties", "statusMessages") >> ForallBend(AzureStatusMessage.mapping),
"system_data": S("systemData") >> Bend(AzureSystemData.mapping),
}
@@ -1673,7 +1697,6 @@ class AzureMachineLearningLabelingJob(BaseAIJob, MicrosoftResource):
progress_metrics: Optional[AzureProgressMetrics] = field(default=None, metadata={'description': 'Progress metrics for a labeling job.'}) # fmt: skip
job_project_id: Optional[str] = field(default=None, metadata={'description': 'Internal id of the job(Previously called project).'}) # fmt: skip
properties: Optional[Dict[str, Any]] = field(default=None, metadata={'description': 'The job property dictionary. Properties can be added, but not removed or altered.'}) # fmt: skip
- status: Optional[str] = field(default=None, metadata={"description": "The status of a job."})
status_messages: Optional[List[AzureStatusMessage]] = field(default=None, metadata={'description': 'Status messages of the job.'}) # fmt: skip
system_data: Optional[AzureSystemData] = field(default=None, metadata={'description': 'Metadata pertaining to creation and last modification of the resource.'}) # fmt: skip
diff --git a/plugins/azure/fix_plugin_azure/resource/microsoft_graph.py b/plugins/azure/fix_plugin_azure/resource/microsoft_graph.py
index 9b90b9e145..b77d159a20 100644
--- a/plugins/azure/fix_plugin_azure/resource/microsoft_graph.py
+++ b/plugins/azure/fix_plugin_azure/resource/microsoft_graph.py
@@ -826,6 +826,7 @@ class MicrosoftGraphUser(MicrosoftGraphEntity, BaseUser):
"usage_location": S("usageLocation"),
"user_principal_name": S("userPrincipalName"),
"user_type": S("userType"),
+ "username": S("displayName"),
}
account_enabled: Optional[bool] = field(default=None, metadata={'description': 'true if the account is enabled; otherwise, false. This property is required when a user is created. Supports $filter (eq, ne, not, and in).'}) # fmt: skip
age_group: Optional[str] = field(default=None, metadata={'description': 'Sets the age group of the user. Allowed values: null, Minor, NotAdult, and Adult. For more information, see legal age group property definitions. Supports $filter (eq, ne, not, and in).'}) # fmt: skip
diff --git a/plugins/azure/fix_plugin_azure/resource/network.py b/plugins/azure/fix_plugin_azure/resource/network.py
index dbfbcbdc6e..8c2ed7c8a8 100644
--- a/plugins/azure/fix_plugin_azure/resource/network.py
+++ b/plugins/azure/fix_plugin_azure/resource/network.py
@@ -40,7 +40,7 @@
EdgeType,
PhantomBaseResource,
)
-from fixlib.json_bender import F, Bender, S, Bend, ForallBend, AsInt, StringToUnitNumber, Upper, Lower
+from fixlib.json_bender import F, MapValue, Bender, S, Bend, ForallBend, AsInt, StringToUnitNumber, Upper, Lower
from fixlib.types import Json
service_name = "networking"
@@ -2347,6 +2347,7 @@ class AzureNetworkSubnet(MicrosoftResource, BaseSubnet):
"service_endpoints": S("properties", "serviceEndpoints")
>> ForallBend(AzureServiceEndpointPropertiesFormat.mapping),
"type": S("type"),
+ "cidr_block": S("properties", "addressPrefix"),
}
address_prefix: Optional[str] = field(default=None, metadata={"description": "The address prefix for the subnet."})
address_prefixes: Optional[List[str]] = field(default=None, metadata={'description': 'List of address prefixes for the subnet.'}) # fmt: skip
@@ -5275,6 +5276,7 @@ class AzureNetworkVirtualNetwork(MicrosoftResource, BaseNetwork):
>> ForallBend(AzureVirtualNetworkPeering.mapping),
"location": S("location"),
"type": S("type"),
+ "cidr_blocks": S("properties", "addressSpace", "addressPrefixes", default=[]),
}
address_space: Optional[AzureAddressSpace] = field(default=None, metadata={'description': 'AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network.'}) # fmt: skip
bgp_communities: Optional[AzureVirtualNetworkBgpCommunities] = field(default=None, metadata={'description': 'Bgp Communities sent over ExpressRoute with each route corresponding to a prefix in this VNET.'}) # fmt: skip
@@ -6790,7 +6792,14 @@ class AzureNetworkDNSZone(MicrosoftResource, BaseDNSZone):
"resolution_virtual_networks": S("properties")
>> S("resolutionVirtualNetworks", default=[])
>> ForallBend(S("id")),
- "zone_type": S("properties", "zoneType"),
+ "private_zone": S("properties", "zoneType")
+ >> MapValue(
+ {
+ "Public": False,
+ "Private": True,
+ },
+ default=False,
+ ),
}
max_number_of_record_sets: Optional[int] = field(default=None, metadata={'description': 'The maximum number of record sets that can be created in this DNS zone. This is a read-only property and any attempt to set this value will be ignored.'}) # fmt: skip
max_number_of_records_per_record_set: Optional[int] = field(default=None, metadata={'description': 'The maximum number of records per record set that can be created in this DNS zone. This is a read-only property and any attempt to set this value will be ignored.'}) # fmt: skip
@@ -6798,7 +6807,6 @@ class AzureNetworkDNSZone(MicrosoftResource, BaseDNSZone):
number_of_record_sets: Optional[int] = field(default=None, metadata={'description': 'The current number of record sets in this DNS zone. This is a read-only property and any attempt to set this value will be ignored.'}) # fmt: skip
registration_virtual_networks: Optional[List[str]] = field(default=None, metadata={'description': 'A list of references to virtual networks that register hostnames in this DNS zone. This is a only when ZoneType is Private.'}) # fmt: skip
resolution_virtual_networks: Optional[List[str]] = field(default=None, metadata={'description': 'A list of references to virtual networks that resolve records in this DNS zone. This is a only when ZoneType is Private.'}) # fmt: skip
- zone_type: Optional[str] = field(default=None, metadata={'description': 'The type of this DNS zone (Public or Private).'}) # fmt: skip
def post_process(self, graph_builder: GraphBuilder, source: Json) -> None:
def collect_record_sets() -> None:
diff --git a/plugins/azure/fix_plugin_azure/resource/storage.py b/plugins/azure/fix_plugin_azure/resource/storage.py
index 1ae7ffe4f7..463c0a9f88 100644
--- a/plugins/azure/fix_plugin_azure/resource/storage.py
+++ b/plugins/azure/fix_plugin_azure/resource/storage.py
@@ -27,7 +27,7 @@
ModelReference,
PhantomBaseResource,
)
-from fixlib.json_bender import Bender, S, ForallBend, Bend
+from fixlib.json_bender import K, Bender, S, ForallBend, Bend, AsBool
from fixlib.types import Json
log = logging.getLogger("fix.plugins.azure")
@@ -165,6 +165,8 @@ class AzureStorageBlobContainer(MicrosoftResource, BaseBucket):
"public_access": S("properties", "publicAccess"),
"remaining_retention_days": S("properties", "remainingRetentionDays"),
"version": S("properties", "version"),
+ "encryption_enabled": S("properties", "defaultEncryptionScope") >> AsBool(),
+ "versioning_enabled": S("properties", "immutableStorageWithVersioning") >> AsBool(),
}
type: Optional[str] = field(default=None, metadata={'description': 'The type of the resource. E.g. Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts '}) # fmt: skip
default_encryption_scope: Optional[str] = field(default=None, metadata={'description': 'Default the container to use specified encryption scope for all writes.'}) # fmt: skip
@@ -317,10 +319,11 @@ class AzureStorageQueue(MicrosoftResource, BaseQueue):
"id": S("id"),
"tags": S("tags", default={}),
"name": S("name"),
- "approximate_message_count": S("properties", "approximateMessageCount"),
"queue_metadata": S("properties", "metadata"),
+ "queue_type": K("standard"),
+ "message_retention_period": K(7),
+ "approximate_message_count": S("properties", "approximateMessageCount"),
}
- approximate_message_count: Optional[int] = field(default=None, metadata={'description': 'Integer indicating an approximate number of messages in the queue. This number is not lower than the actual number of messages in the queue, but could be higher.'}) # fmt: skip
queue_metadata: Optional[Dict[str, str]] = field(default=None, metadata={'description': 'A name-value pair that represents queue metadata.'}) # fmt: skip
diff --git a/plugins/azure/fix_plugin_azure/resource/web.py b/plugins/azure/fix_plugin_azure/resource/web.py
index c1f0296c72..7920046fcf 100644
--- a/plugins/azure/fix_plugin_azure/resource/web.py
+++ b/plugins/azure/fix_plugin_azure/resource/web.py
@@ -1481,6 +1481,7 @@ class AzureWebApp(MicrosoftResource, BaseServerlessFunction):
"vnet_image_pull_enabled": S("properties", "vnetImagePullEnabled"),
"vnet_route_all_enabled": S("properties", "vnetRouteAllEnabled"),
"workload_profile_name": S("properties", "workloadProfileName"),
+ "memory_size": S("properties", "siteConfig", "limits", "maxMemoryInMb"),
}
availability_state: Optional[str] = field(default=None, metadata={'description': 'Management information availability state for the app.'}) # fmt: skip
client_affinity_enabled: Optional[bool] = field(default=None, metadata={'description': 'true to enable client affinity; false to stop sending session affinity cookies, which route client requests in the same session to the same instance. Default is true.'}) # fmt: skip
diff --git a/plugins/gcp/fix_plugin_gcp/resources/aiplatform.py b/plugins/gcp/fix_plugin_gcp/resources/aiplatform.py
index e9c16e052f..d3b1d87c3e 100644
--- a/plugins/gcp/fix_plugin_gcp/resources/aiplatform.py
+++ b/plugins/gcp/fix_plugin_gcp/resources/aiplatform.py
@@ -12,8 +12,8 @@
GcpDeprecationStatus,
GraphBuilder,
)
-from fixlib.baseresources import BaseAIJob, ModelReference, BaseAIModel
-from fixlib.json_bender import Bender, S, Bend, ForallBend, MapDict
+from fixlib.baseresources import BaseAIJob, AIJobStatus, ModelReference, BaseAIModel
+from fixlib.json_bender import Bender, S, Bend, ForallBend, MapDict, MapEnum
from fixlib.types import Json
service_name = "aiplatform"
@@ -29,6 +29,21 @@
"Permission denied by location policies",
}
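+# Map GCP Vertex AI job states onto the shared AIJobStatus enum; unmapped states fall back to AIJobStatus.UNKNOWN via MapEnum.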
+GCP_AI_JOB_STATUS_MAPPING = {
+ "JOB_STATE_UNSPECIFIED": AIJobStatus.UNKNOWN,
+ "JOB_STATE_QUEUED": AIJobStatus.PENDING,
+ "JOB_STATE_PENDING": AIJobStatus.PENDING,
+ "JOB_STATE_RUNNING": AIJobStatus.RUNNING,
+ "JOB_STATE_SUCCEEDED": AIJobStatus.COMPLETED,
+ "JOB_STATE_FAILED": AIJobStatus.FAILED,
+ "JOB_STATE_CANCELLING": AIJobStatus.STOPPING,
+ "JOB_STATE_CANCELLED": AIJobStatus.CANCELLED,
+ "JOB_STATE_PAUSED": AIJobStatus.PAUSED,
+ "JOB_STATE_EXPIRED": AIJobStatus.FAILED,
+ "JOB_STATE_UPDATING": AIJobStatus.RUNNING,
+ "JOB_STATE_PARTIALLY_SUCCEEDED": AIJobStatus.COMPLETED,
+}
+
class AIPlatformRegionFilter:
@classmethod
@@ -530,7 +545,7 @@ class GcpVertexAIBatchPredictionJob(AIPlatformRegionFilter, BaseAIJob, GcpResour
"resources_consumed": S("resourcesConsumed", "replicaHours"),
"service_account": S("serviceAccount"),
"start_time": S("startTime"),
- "state": S("state"),
+ "state": S("state") >> MapEnum(GCP_AI_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"unmanaged_container_model": S("unmanagedContainerModel", default={})
>> Bend(GcpVertexAIUnmanagedContainerModel.mapping),
"update_time": S("updateTime"),
@@ -557,7 +572,6 @@ class GcpVertexAIBatchPredictionJob(AIPlatformRegionFilter, BaseAIJob, GcpResour
resources_consumed: Optional[float] = field(default=None)
service_account: Optional[str] = field(default=None)
start_time: Optional[datetime] = field(default=None)
- state: Optional[str] = field(default=None)
unmanaged_container_model: Optional[GcpVertexAIUnmanagedContainerModel] = field(default=None)
update_time: Optional[datetime] = field(default=None)
@@ -724,7 +738,7 @@ class GcpVertexAICustomJob(AIPlatformRegionFilter, BaseAIJob, GcpResource):
"rpc_error": S("error", default={}) >> Bend(GcpGoogleRpcStatus.mapping),
"custom_job_spec": S("jobSpec", default={}) >> Bend(GcpVertexAICustomJobSpec.mapping),
"start_time": S("startTime"),
- "state": S("state"),
+ "state": S("state") >> MapEnum(GCP_AI_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"update_time": S("updateTime"),
"web_access_uris": S("webAccessUris"),
}
@@ -735,7 +749,6 @@ class GcpVertexAICustomJob(AIPlatformRegionFilter, BaseAIJob, GcpResource):
rpc_error: Optional[GcpGoogleRpcStatus] = field(default=None)
custom_job_spec: Optional[GcpVertexAICustomJobSpec] = field(default=None)
start_time: Optional[datetime] = field(default=None)
- state: Optional[str] = field(default=None)
update_time: Optional[datetime] = field(default=None)
web_access_uris: Optional[Dict[str, str]] = field(default=None)
@@ -1634,7 +1647,7 @@ class GcpVertexAIHyperparameterTuningJob(AIPlatformRegionFilter, BaseAIJob, GcpR
"max_trial_count": S("maxTrialCount"),
"parallel_trial_count": S("parallelTrialCount"),
"start_time": S("startTime"),
- "state": S("state"),
+ "state": S("state") >> MapEnum(GCP_AI_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"study_spec": S("studySpec", default={}) >> Bend(GcpVertexAIStudySpec.mapping),
"trial_job_spec": S("trialJobSpec", default={}) >> Bend(GcpVertexAICustomJobSpec.mapping),
"trials": S("trials", default=[]) >> ForallBend(GcpVertexAITrial.mapping),
@@ -1649,7 +1662,6 @@ class GcpVertexAIHyperparameterTuningJob(AIPlatformRegionFilter, BaseAIJob, GcpR
max_trial_count: Optional[int] = field(default=None)
parallel_trial_count: Optional[int] = field(default=None)
start_time: Optional[datetime] = field(default=None)
- state: Optional[str] = field(default=None)
study_spec: Optional[GcpVertexAIStudySpec] = field(default=None)
trial_job_spec: Optional[GcpVertexAICustomJobSpec] = field(default=None)
trials: Optional[List[GcpVertexAITrial]] = field(default=None)
@@ -2151,7 +2163,7 @@ class GcpVertexAIModelDeploymentMonitoringJob(AIPlatformRegionFilter, BaseAIJob,
"predict_instance_schema_uri": S("predictInstanceSchemaUri"),
"sample_predict_instance": S("samplePredictInstance"),
"schedule_state": S("scheduleState"),
- "state": S("state"),
+ "state": S("state") >> MapEnum(GCP_AI_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"stats_anomalies_base_directory": S("statsAnomaliesBaseDirectory", "outputUriPrefix"),
"update_time": S("updateTime"),
}
@@ -2179,7 +2191,6 @@ class GcpVertexAIModelDeploymentMonitoringJob(AIPlatformRegionFilter, BaseAIJob,
predict_instance_schema_uri: Optional[str] = field(default=None)
sample_predict_instance: Optional[Any] = field(default=None)
schedule_state: Optional[str] = field(default=None)
- state: Optional[str] = field(default=None)
stats_anomalies_base_directory: Optional[str] = field(default=None)
update_time: Optional[datetime] = field(default=None)
@@ -2704,7 +2715,7 @@ class GcpVertexAIPipelineJob(AIPlatformRegionFilter, BaseAIJob, GcpResource):
"schedule_name": S("scheduleName"),
"service_account": S("serviceAccount"),
"start_time": S("startTime"),
- "state": S("state"),
+ "state": S("state") >> MapEnum(GCP_AI_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"template_metadata": S("templateMetadata", "version"),
"template_uri": S("templateUri"),
"update_time": S("updateTime"),
@@ -2723,7 +2734,6 @@ class GcpVertexAIPipelineJob(AIPlatformRegionFilter, BaseAIJob, GcpResource):
schedule_name: Optional[str] = field(default=None)
service_account: Optional[str] = field(default=None)
start_time: Optional[datetime] = field(default=None)
- state: Optional[str] = field(default=None)
template_metadata: Optional[str] = field(default=None)
template_uri: Optional[str] = field(default=None)
update_time: Optional[datetime] = field(default=None)
@@ -3259,7 +3269,7 @@ class GcpVertexAITuningJob(AIPlatformRegionFilter, BaseAIJob, GcpResource):
"rpc_error": S("error", default={}) >> Bend(GcpGoogleRpcStatus.mapping),
"experiment": S("experiment"),
"start_time": S("startTime"),
- "state": S("state"),
+ "state": S("state") >> MapEnum(GCP_AI_JOB_STATUS_MAPPING, AIJobStatus.UNKNOWN),
"supervised_tuning_spec": S("supervisedTuningSpec", default={})
>> Bend(GcpVertexAISupervisedTuningSpec.mapping),
"tuned_model": S("tunedModel", default={}) >> Bend(GcpVertexAITunedModel.mapping),
@@ -3274,7 +3284,6 @@ class GcpVertexAITuningJob(AIPlatformRegionFilter, BaseAIJob, GcpResource):
rpc_error: Optional[GcpGoogleRpcStatus] = field(default=None)
experiment: Optional[str] = field(default=None)
start_time: Optional[datetime] = field(default=None)
- state: Optional[str] = field(default=None)
supervised_tuning_spec: Optional[GcpVertexAISupervisedTuningSpec] = field(default=None)
tuned_model: Optional[GcpVertexAITunedModel] = field(default=None)
tuned_model_display_name: Optional[str] = field(default=None)
diff --git a/plugins/gcp/fix_plugin_gcp/resources/compute.py b/plugins/gcp/fix_plugin_gcp/resources/compute.py
index d5df20cd8d..ee4656d2a2 100644
--- a/plugins/gcp/fix_plugin_gcp/resources/compute.py
+++ b/plugins/gcp/fix_plugin_gcp/resources/compute.py
@@ -7280,6 +7280,7 @@ class GcpSubnetwork(GcpResource, BaseSubnet):
"secondary_ip_ranges": S("secondaryIpRanges", default=[]) >> ForallBend(GcpSubnetworkSecondaryRange.mapping),
"stack_type": S("stackType"),
"subnetwork_state": S("state"),
+ "cidr_block": S("ipCidrRange"),
}
enable_flow_logs: Optional[bool] = field(default=None)
external_ipv6_prefix: Optional[str] = field(default=None)
diff --git a/plugins/gcp/fix_plugin_gcp/resources/storage.py b/plugins/gcp/fix_plugin_gcp/resources/storage.py
index f907077642..e453bba658 100644
--- a/plugins/gcp/fix_plugin_gcp/resources/storage.py
+++ b/plugins/gcp/fix_plugin_gcp/resources/storage.py
@@ -7,7 +7,7 @@
from fix_plugin_gcp.resources.base import GcpResource, GcpDeprecationStatus, get_client
from fixlib.baseresources import BaseBucket
from fixlib.graph import Graph
-from fixlib.json_bender import Bender, S, Bend, ForallBend
+from fixlib.json_bender import Bender, S, Bend, ForallBend, AsBool
service_name = "storage"
@@ -391,6 +391,7 @@ class GcpBucket(GcpResource, BaseBucket):
"updated": S("updated"),
"versioning_enabled": S("versioning", "enabled"),
"bucket_website": S("website", default={}) >> Bend(GcpWebsite.mapping),
+ "encryption_enabled": S("encryption", "defaultKmsKeyName") >> AsBool(),
}
acl: Optional[List[GcpBucketAccessControl]] = field(default=None)
autoclass: Optional[GcpAutoclass] = field(default=None)
@@ -415,7 +416,6 @@ class GcpBucket(GcpResource, BaseBucket):
updated: Optional[datetime] = field(default=None)
bucket_website: Optional[GcpWebsite] = field(default=None)
requester_pays: Optional[bool] = field(default=None)
- versioning_enabled: Optional[bool] = field(default=None)
lifecycle_rule: List[GcpRule] = field(factory=list)
def pre_delete(self, graph: Graph) -> bool:
diff --git a/requirements-all.txt b/requirements-all.txt
index 7fbe0e6b8c..ffd5e59667 100644
--- a/requirements-all.txt
+++ b/requirements-all.txt
@@ -1,7 +1,7 @@
aiodns==3.2.0
aiofiles==24.1.0
aiohappyeyeballs==2.4.3
-aiohttp[speedups]==3.10.10
+aiohttp[speedups]==3.10.11
aiohttp-jinja2==1.6
aiohttp-swagger3==0.9.0
aiosignal==1.3.1
@@ -18,8 +18,8 @@ azure-mgmt-resource==23.2.0
backoff==2.2.1
beautifulsoup4==4.12.3
black==24.10.0
-boto3==1.35.57
-botocore==1.35.57
+boto3==1.35.65
+botocore==1.35.65
brotli==1.1.0
build==1.2.2.post1
cached-property==2.0.1
@@ -36,11 +36,11 @@ click==8.1.7
click-option-group==0.5.6
cloudsplaining==0.7.0
colorama==0.4.6
-coverage[toml]==7.6.4
+coverage[toml]==7.6.7
cryptography==43.0.3
deepdiff==8.0.1
defusedxml==0.7.1
-deprecated==1.2.14
+deprecated==1.2.15
detect-secrets==1.5.0
dill==0.3.9
distlib==0.3.9
@@ -56,18 +56,18 @@ flexcache==0.3
flexparser==0.4
frozendict==2.4.6
frozenlist==1.5.0
-google-api-core==2.22.0
-google-api-python-client==2.151.0
+google-api-core==2.23.0
+google-api-python-client==2.153.0
google-auth==2.36.0
google-auth-httplib2==0.2.0
google-cloud-core==2.4.1
google-cloud-storage==2.18.2
google-crc32c==1.6.0
google-resumable-media==2.7.2
-googleapis-common-protos==1.65.0
+googleapis-common-protos==1.66.0
hcloud==2.2.0
httplib2==0.22.0
-hypothesis==6.118.7
+hypothesis==6.119.3
idna==3.10
importlib-metadata==8.5.0
iniconfig==2.0.0
@@ -89,7 +89,7 @@ mccabe==0.7.0
mdurl==0.1.2
monotonic==1.6
more-itertools==10.5.0
-msal==1.31.0
+msal==1.31.1
msal-extensions==1.2.0
mstache==0.2.0
multidict==6.1.0
@@ -114,7 +114,7 @@ pluggy==1.5.0
policy-sentry==0.13.1
portalocker==2.10.1
portend==3.2.0
-posthog==3.7.0
+posthog==3.7.2
prometheus-client==0.21.0
prompt-toolkit==3.0.48
propcache==0.2.0
@@ -131,7 +131,7 @@ pycparser==2.22
pyflakes==3.2.0
pygithub==2.5.0
pygments==2.18.0
-pyjwt[crypto]==2.9.0
+pyjwt[crypto]==2.10.0
pylint==3.3.1
pymysql==1.1.1
pynacl==1.5.0
@@ -156,9 +156,9 @@ rich==13.9.4
rsa==4.9
s3transfer==0.10.3
schema==0.7.7
-setuptools==75.3.0
+setuptools==75.5.0
six==1.16.0
-slack-sdk==3.33.3
+slack-sdk==3.33.4
snowflake-connector-python==3.12.3
snowflake-sqlalchemy==1.6.1
sortedcontainers==2.4.0
@@ -177,7 +177,7 @@ types-python-dateutil==2.9.0.20241003
types-pytz==2024.2.0.20241003
types-pyyaml==6.0.12.20240917
types-requests==2.31.0.6
-types-setuptools==75.3.0.20241107
+types-setuptools==75.5.0.20241122
types-six==1.16.21.20241105
types-tzlocal==5.1.0.1
types-urllib3==1.26.25.14
@@ -193,6 +193,6 @@ wcwidth==0.2.13
websocket-client==1.8.0
wheel==0.45.0
wrapt==1.16.0
-yarl==1.17.1
+yarl==1.17.2
zc-lockfile==3.0.post1
zipp==3.21.0
diff --git a/requirements-extra.txt b/requirements-extra.txt
index 2bf137915e..03ce8ad2f9 100644
--- a/requirements-extra.txt
+++ b/requirements-extra.txt
@@ -1,7 +1,7 @@
aiodns==3.2.0
aiofiles==24.1.0
aiohappyeyeballs==2.4.3
-aiohttp[speedups]==3.10.10
+aiohttp[speedups]==3.10.11
aiohttp-jinja2==1.6
aiohttp-swagger3==0.9.0
aiosignal==1.3.1
@@ -16,8 +16,8 @@ azure-mgmt-core==1.5.0
azure-mgmt-resource==23.2.0
backoff==2.2.1
beautifulsoup4==4.12.3
-boto3==1.35.57
-botocore==1.35.57
+boto3==1.35.65
+botocore==1.35.65
brotli==1.1.0
cached-property==2.0.1
cachetools==5.5.0
@@ -34,7 +34,7 @@ cloudsplaining==0.7.0
cryptography==43.0.3
deepdiff==8.0.1
defusedxml==0.7.1
-deprecated==1.2.14
+deprecated==1.2.15
detect-secrets==1.5.0
durationpy==0.9
fastjsonschema==2.19.1
@@ -47,15 +47,15 @@ flexcache==0.3
flexparser==0.4
frozendict==2.4.6
frozenlist==1.5.0
-google-api-core==2.22.0
-google-api-python-client==2.151.0
+google-api-core==2.23.0
+google-api-python-client==2.153.0
google-auth==2.36.0
google-auth-httplib2==0.2.0
google-cloud-core==2.4.1
google-cloud-storage==2.18.2
google-crc32c==1.6.0
google-resumable-media==2.7.2
-googleapis-common-protos==1.65.0
+googleapis-common-protos==1.66.0
hcloud==2.2.0
httplib2==0.22.0
idna==3.10
@@ -76,7 +76,7 @@ markupsafe==3.0.2
mdurl==0.1.2
monotonic==1.6
more-itertools==10.5.0
-msal==1.31.0
+msal==1.31.1
msal-extensions==1.2.0
mstache==0.2.0
multidict==6.1.0
@@ -94,7 +94,7 @@ platformdirs==4.3.6
policy-sentry==0.13.1
portalocker==2.10.1
portend==3.2.0
-posthog==3.7.0
+posthog==3.7.2
prometheus-client==0.21.0
prompt-toolkit==3.0.48
propcache==0.2.0
@@ -109,7 +109,7 @@ pycares==4.4.0
pycparser==2.22
pygithub==2.5.0
pygments==2.18.0
-pyjwt[crypto]==2.9.0
+pyjwt[crypto]==2.10.0
pymysql==1.1.1
pynacl==1.5.0
pyopenssl==24.2.1
@@ -127,9 +127,9 @@ rich==13.9.4
rsa==4.9
s3transfer==0.10.3
schema==0.7.7
-setuptools==75.3.0
+setuptools==75.5.0
six==1.16.0
-slack-sdk==3.33.3
+slack-sdk==3.33.4
snowflake-connector-python==3.12.3
snowflake-sqlalchemy==1.6.1
sortedcontainers==2.4.0
@@ -151,6 +151,6 @@ ustache==0.1.6
wcwidth==0.2.13
websocket-client==1.8.0
wrapt==1.16.0
-yarl==1.17.1
+yarl==1.17.2
zc-lockfile==3.0.post1
zipp==3.21.0
diff --git a/requirements.txt b/requirements.txt
index 672f7f15b6..727b5ce149 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
aiodns==3.2.0
aiofiles==24.1.0
aiohappyeyeballs==2.4.3
-aiohttp[speedups]==3.10.10
+aiohttp[speedups]==3.10.11
aiohttp-jinja2==1.6
aiohttp-swagger3==0.9.0
aiosignal==1.3.1
@@ -15,8 +15,8 @@ azure-mgmt-core==1.5.0
azure-mgmt-resource==23.2.0
backoff==2.2.1
beautifulsoup4==4.12.3
-boto3==1.35.57
-botocore==1.35.57
+boto3==1.35.65
+botocore==1.35.65
brotli==1.1.0
cached-property==2.0.1
cachetools==5.5.0
@@ -33,7 +33,7 @@ cloudsplaining==0.7.0
cryptography==43.0.3
deepdiff==8.0.1
defusedxml==0.7.1
-deprecated==1.2.14
+deprecated==1.2.15
detect-secrets==1.5.0
durationpy==0.9
fastjsonschema==2.19.1
@@ -45,11 +45,11 @@ flexcache==0.3
flexparser==0.4
frozendict==2.4.6
frozenlist==1.5.0
-google-api-core==2.22.0
-google-api-python-client==2.151.0
+google-api-core==2.23.0
+google-api-python-client==2.153.0
google-auth==2.36.0
google-auth-httplib2==0.2.0
-googleapis-common-protos==1.65.0
+googleapis-common-protos==1.66.0
hcloud==2.2.0
httplib2==0.22.0
idna==3.10
@@ -70,7 +70,7 @@ markupsafe==3.0.2
mdurl==0.1.2
monotonic==1.6
more-itertools==10.5.0
-msal==1.31.0
+msal==1.31.1
msal-extensions==1.2.0
mstache==0.2.0
multidict==6.1.0
@@ -88,7 +88,7 @@ platformdirs==4.3.6
policy-sentry==0.13.1
portalocker==2.10.1
portend==3.2.0
-posthog==3.7.0
+posthog==3.7.2
prometheus-client==0.21.0
prompt-toolkit==3.0.48
propcache==0.2.0
@@ -101,7 +101,7 @@ pycares==4.4.0
pycparser==2.22
pygithub==2.5.0
pygments==2.18.0
-pyjwt[crypto]==2.9.0
+pyjwt[crypto]==2.10.0
pynacl==1.5.0
pyparsing==3.2.0
python-arango==8.1.2
@@ -117,9 +117,9 @@ rich==13.9.4
rsa==4.9
s3transfer==0.10.3
schema==0.7.7
-setuptools==75.3.0
+setuptools==75.5.0
six==1.16.0
-slack-sdk==3.33.3
+slack-sdk==3.33.4
soupsieve==2.6
sqlalchemy==1.4.54
tempora==5.7.0
@@ -137,6 +137,6 @@ ustache==0.1.6
wcwidth==0.2.13
websocket-client==1.8.0
wrapt==1.16.0
-yarl==1.17.1
+yarl==1.17.2
zc-lockfile==3.0.post1
zipp==3.21.0