diff --git a/.github/workflows/housekeeping.yml b/.github/workflows/housekeeping.yml
index 0a89ed67..59a83b65 100644
--- a/.github/workflows/housekeeping.yml
+++ b/.github/workflows/housekeeping.yml
@@ -12,4 +12,4 @@ jobs:
delete_head_branch: true
squash_merge: true
branch_protection: true
- status_checks: true
+ status_checks: false
diff --git a/.github/workflows/qa.yml b/.github/workflows/qa.yml
index 72a32129..3547da8b 100644
--- a/.github/workflows/qa.yml
+++ b/.github/workflows/qa.yml
@@ -107,6 +107,7 @@ jobs:
release:
runs-on: ubuntu-latest
needs: qa-test
+ if: github.ref == 'refs/heads/main'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
diff --git a/.gitignore b/.gitignore
index 6d9e56a5..ad17cb82 100644
--- a/.gitignore
+++ b/.gitignore
@@ -196,3 +196,6 @@ cython_debug/
.trunk/
*.pem
+
+# Auto-generated files
+auto-generated
\ No newline at end of file
diff --git a/README.md b/README.md
index 927e95ff..6559177c 100644
--- a/README.md
+++ b/README.md
@@ -88,17 +88,16 @@ Terraform module for AWS RDS instances
| [environment](#input\_environment) | Specify the staging environment.
Valid Values: "dev", "test", "staging", "uat", "training", "prod".
Notes: The value will set configuration defaults according to DFDS policies. | `string` | n/a | yes |
| [final\_snapshot\_identifier\_prefix](#input\_final\_snapshot\_identifier\_prefix) | Specifies the name which is prefixed to the final snapshot on cluster destroy.
Valid Values: .
Notes: . | `string` | `"final"` | no |
| [iam\_database\_authentication\_enabled](#input\_iam\_database\_authentication\_enabled) | Set this to true to enable authentication using IAM.
Valid Values: .
Notes: This requires creating mappings between IAM users/roles and database accounts in the RDS instance for this to work properly. | `bool` | `false` | no |
-| [identifier](#input\_identifier) | Specify the name of the RDS instance to create.
Valid Values: .
Notes: . | `string` | n/a | yes |
+| [identifier](#input\_identifier) | Specify the name of the RDS instance to create.
Valid Values: .
Notes: This | `string` | n/a | yes |
| [instance\_class](#input\_instance\_class) | Specify instance type of the RDS instance.
Valid Values:
"db.t3.micro",
"db.t3.small",
"db.t3.medium",
"db.t3.large",
"db.t3.xlarge",
"db.t3.2xlarge",
"db.r6g.xlarge",
"db.m6g.large",
"db.m6g.xlarge",
"db.t2.micro",
"db.t2.small",
"db.t2.medium",
"db.m4.large",
"db.m5d.large",
"db.m6i.large",
"db.m5.xlarge",
"db.t4g.micro",
"db.t4g.small",
"db.t4g.large",
"db.t4g.xlarge"
Notes: If omitted, the instance type will be set to db.t3.micro. | `string` | `null` | no |
| [instance\_is\_multi\_az](#input\_instance\_is\_multi\_az) | Specify if the RDS instance is multi-AZ.
Valid Values: .
Notes:
- This creates a primary DB instance and a standby DB instance in a different AZ for high availability and data redundancy.
- Standby DB instance doesn't support connections for read workloads.
- If this variable is omitted:
- This value is set to true by default for production environments.
- This value is set to false by default for non-production environments. | `bool` | `null` | no |
| [instance\_parameters](#input\_instance\_parameters) | Specify a list of DB parameters (map) to modify.
Valid Values: Example:
instance\_parameters = [{
name = "rds.force\_ssl"
value = 1
apply\_method = "pending-reboot",
... # Other parameters
}]
Notes: See [documentation](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.PostgreSQL.CommonDBATasks.html#Appendix.PostgreSQL.CommonDBATasks.Parameters) for more information. | `list(map(string))` | `[]` | no |
| [instance\_terraform\_timeouts](#input\_instance\_terraform\_timeouts) | Specify Terraform resource management timeouts.
Valid Values: .
Notes: Applies to `aws_db_instance` in particular to permit resource management times. See [documentation](https://www.terraform.io/docs/configuration/resources.html#operation-timeouts) for more information. | `map(string)` | `{}` | no |
| [iops](#input\_iops) | Specify the amount of provisioned IOPS.
Valid Values: .
Notes: Setting this implies a storage\_type of `io1` or `gp3`. See `notes` for limitations regarding this variable for `gp3`. | `number` | `null` | no |
-| [is\_cluster](#input\_is\_cluster) | n/a | `bool` | `false` | no |
-| [is\_instance](#input\_is\_instance) | n/a | `bool` | `true` | no |
+| [is\_cluster](#input\_is\_cluster) | [Experimental Feature] Specify whether or not to deploy the instance as a multi-AZ database cluster.
Valid Values: .
Notes:
  - This feature is currently in beta and is subject to change.
  - It creates a DB cluster with a primary DB instance and two readable standby DB instances.
  - Each DB instance is in a different Availability Zone (AZ).
  - Provides high availability, data redundancy, and increased capacity to serve read workloads.
  - Proxy is not supported for cluster instances.
  - For smaller workloads we recommend using a single instance instead of a cluster. | `bool` | `false` | no |
| [is\_kubernetes\_app\_enabled](#input\_is\_kubernetes\_app\_enabled) | Specify whether or not to enable access from Kubernetes pods.
Valid Values: .
Notes: Enabling this will create the following resources:
- IAM role for service account (IRSA)
- IAM policy for service account (IRSA) | `bool` | `false` | no |
| [is\_proxy\_included](#input\_is\_proxy\_included) | Specify whether or not to include proxy.
Valid Values: .
Notes: Proxy helps managing database connections. See [documentation](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy-planning.html) for more information. | `bool` | `false` | no |
-| [is\_serverless](#input\_is\_serverless) | n/a | `bool` | `false` | no |
+| [is\_publicly\_accessible](#input\_is\_publicly\_accessible) | Specify whether or not this instance is publicly accessible.
Valid Values: .
Notes:
 - Setting this to true will do the following:
   - Assign a public IP address, and the host name of the DB instance will resolve to the public IP address.
   - Access from within the VPC can be achieved by using the private IP address of the assigned Network Interface. | `bool` | `false` | no |
| [maintenance\_window](#input\_maintenance\_window) | Specify the window to perform maintenance in.
Valid Values: Syntax: `ddd:hh24:mi-ddd:hh24:mi`. Eg: `"Mon:00:00-Mon:03:00"`.
Notes: Default value is set to `"Sat:18:00-Sat:20:00"`. This is adjusted in accordance with AWS Backup schedule, see info [here](https://wiki.dfds.cloud/en/playbooks/aws-backup/aws-backup-getting-started). | `string` | `"Sat:18:00-Sat:20:00"` | no |
| [manage\_master\_user\_password](#input\_manage\_master\_user\_password) | Set to true to allow RDS to manage the master user password in Secrets Manager
Valid Values: .
Notes:
- Default value is set to true. It is recommended to use this feature.
- If set to true, the `password` variable will be ignored. | `bool` | `true` | no |
| [max\_allocated\_storage](#input\_max\_allocated\_storage) | Set the value to enable Storage Autoscaling and to set the max allocated storage.
Valid Values: .
Notes:
- If this variable is omitted:
- This value is set to 50 by default for production environments.
- This value is set to 0 by default for non-production environments. | `number` | `null` | no |
@@ -117,7 +116,6 @@ Terraform module for AWS RDS instances
| [proxy\_idle\_client\_timeout](#input\_proxy\_idle\_client\_timeout) | Specify idle client timeout of the RDS proxy (keep connection alive).
Valid Values: .
Notes: . | `number` | `1800` | no |
| [proxy\_require\_tls](#input\_proxy\_require\_tls) | Specify whether or not to require TLS for the proxy.
Valid Values: .
Notes: Default value is set to true. | `bool` | `true` | no |
| [proxy\_security\_group\_rules](#input\_proxy\_security\_group\_rules) | Specify additional security group rules for the RDS proxy.
Valid Values: .
Notes:
- Only ingress(inbound) rules are supported.
- Ingress rules are set to "Allow outbound traffic to PostgreSQL instance"
- Ingress rules are set to "Allow inbound traffic from same security group on specified database port" |
object({
ingress_rules = list(any)
ingress_with_self = optional(list(any), [])
})
| {
"ingress_rules": []
}
| no |
-| [publicly\_accessible](#input\_publicly\_accessible) | Specify whether or not this instance is publicly accessible.
Valid Values: .
Notes:
- Setting this to true will do the followings:
- Assign a public IP address and the host name of the DB instance will resolve to the public IP address.
- Access from within the VPC can be achived by using the private IP address of the assigned Network Interface. | `bool` | `false` | no |
| [rds\_security\_group\_rules](#input\_rds\_security\_group\_rules) | Specify additional security group rules for the RDS instance.
Valid Values: .
Notes: . | object({
ingress_rules = list(any)
ingress_with_self = optional(list(any), [])
egress_rules = optional(list(any), [])
})
| n/a | yes |
| [replicate\_source\_db](#input\_replicate\_source\_db) | Indicate that this resource is a replica database, and use this value as the source database.
Valid Values: The identifier of another Amazon RDS Database to replicate in the same region.
Notes: In case of cross-region replication, specify the ARN of the source DB instance. | `string` | `null` | no |
| [resource\_owner\_contact\_email](#input\_resource\_owner\_contact\_email) | Provide an email address for the resource owner (e.g. team or individual).
Valid Values: .
Notes: This sets the dfds.owner tag. See recommendations [here](https://wiki.dfds.cloud/en/playbooks/standards/tagging_policy). | `string` | `null` | no |
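To illustrate the variable rename above, here is a minimal, hedged sketch of calling the module after this change. The `ref`, the identifier, and the subnet ID are placeholders; `environment`, `identifier`, and `rds_security_group_rules` are the required inputs per the table, and `is_publicly_accessible` replaces the removed `publicly_accessible`.

```hcl
module "rds_instance" {
  source = "git::https://github.com/dfds/aws-modules-rds.git?ref=<version>" # placeholder ref; pin a release tag

  environment = "dev"        # required; sets configuration defaults per DFDS policies
  identifier  = "example-db" # required; name of the RDS instance to create

  # Renamed in this change (previously `publicly_accessible`)
  is_publicly_accessible = false

  # Required input; additional security group rules for the RDS instance
  rds_security_group_rules = {
    ingress_rules = []
  }

  subnet_ids = ["subnet-00000000000000000"] # placeholder subnet ID
}
```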
diff --git a/locals.tf b/locals.tf
index e5a310c6..b5bdc94d 100644
--- a/locals.tf
+++ b/locals.tf
@@ -72,7 +72,7 @@ locals {
########################################################################
iops = var.iops == null && var.storage_type == "io1" ? 1000 : var.iops # The minimum value is 1,000 IOPS and the maximum value is 256,000 IOPS. The IOPS to GiB ratio must be between 0.5 and 50
- is_serverless = var.is_serverless # temporary controlled by variable. TODO: Replace by calculation
+ is_serverless = false # temporarily hardcoded. TODO: replace with a calculation
final_snapshot_identifier = var.skip_final_snapshot ? null : "${var.final_snapshot_identifier_prefix}-${var.identifier}-${try(random_id.snapshot_identifier[0].hex, "")}"
engine = "postgres"
diff --git a/main.tf b/main.tf
index 85d7dd35..8bfbf1fb 100644
--- a/main.tf
+++ b/main.tf
@@ -95,7 +95,7 @@ module "db_instance" {
multi_az = local.instance_is_multi_az
iops = var.iops
storage_throughput = var.storage_throughput
- publicly_accessible = var.publicly_accessible
+ publicly_accessible = var.is_publicly_accessible
ca_cert_identifier = var.ca_cert_identifier
allow_major_version_upgrade = var.allow_major_version_upgrade
auto_minor_version_upgrade = var.auto_minor_version_upgrade
diff --git a/tests/instance/main.tf b/tests/instance/main.tf
index a94bd5e0..a68653df 100644
--- a/tests/instance/main.tf
+++ b/tests/instance/main.tf
@@ -26,7 +26,7 @@ module "rds_instance_test" {
username = "instance_user"
apply_immediately = true
- publicly_accessible = true
+ is_publicly_accessible = true
subnet_ids = concat(module.vpc.public_subnets)
enabled_cloudwatch_logs_exports = ["upgrade", "postgresql"]
cloudwatch_log_group_retention_in_days = 1
diff --git a/tests/qa/main.tf b/tests/qa/main.tf
index b0e1c6c1..fa07bd56 100644
--- a/tests/qa/main.tf
+++ b/tests/qa/main.tf
@@ -29,7 +29,7 @@ module "rds_instance_test" { # TODO: change to only use defaults and required va
iam_database_authentication_enabled = true
ca_cert_identifier = "rds-ca-ecc384-g1"
apply_immediately = true
- publicly_accessible = true
+ is_publicly_accessible = true
subnet_ids = ["subnet-04d5d42ac21fd8e8f", "subnet-0e50a82dec5fc0272", "subnet-0a49d384ff2e8a580"]
enabled_cloudwatch_logs_exports = ["upgrade", "postgresql"]
cloudwatch_log_group_retention_in_days = 1
diff --git a/tools/Dockerfile b/tools/Dockerfile
new file mode 100644
index 00000000..1c1e664b
--- /dev/null
+++ b/tools/Dockerfile
@@ -0,0 +1,40 @@
+FROM python:slim
+
+RUN apt-get update && \
+    apt-get install -y curl libpq-dev less jq tar unzip openssh-client
+
+
+# Add GitHub's public SSH host key to known_hosts (ssh is provided by openssh-client)
+RUN ssh -T -o "StrictHostKeyChecking no" -o "PubkeyAuthentication no" git@github.com || true
+
+# ========================================
+# TERRAFORM DOCS
+# ========================================
+ENV TERRAFORM_DOCS_VERSION=0.17.0
+RUN export BUILD_ARCHITECTURE=$(uname -m); \
+ if [ "$BUILD_ARCHITECTURE" = "x86_64" ]; then export BUILD_ARCHITECTURE_ARCH=amd64; fi; \
+ if [ "$BUILD_ARCHITECTURE" = "aarch64" ]; then export BUILD_ARCHITECTURE_ARCH=arm64; fi; \
+ curl -sSLo ./terraform-docs.tar.gz https://terraform-docs.io/dl/v${TERRAFORM_DOCS_VERSION}/terraform-docs-v${TERRAFORM_DOCS_VERSION}-linux-${BUILD_ARCHITECTURE_ARCH}.tar.gz
+RUN tar -xzf terraform-docs.tar.gz && \
+    chmod +x terraform-docs && \
+    mv terraform-docs /usr/local/bin/
+
+# ========================================
+# TERRAFORM
+# ========================================
+
+ENV TERRAFORM_VERSION=1.4.6
+
+RUN export BUILD_ARCHITECTURE=$(uname -m); \
+ if [ "$BUILD_ARCHITECTURE" = "x86_64" ]; then export BUILD_ARCHITECTURE_ARCH=amd64; fi; \
+ if [ "$BUILD_ARCHITECTURE" = "aarch64" ]; then export BUILD_ARCHITECTURE_ARCH=arm64; fi; \
+ curl -Os https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_${BUILD_ARCHITECTURE_ARCH}.zip \
+ && unzip terraform_${TERRAFORM_VERSION}_linux_${BUILD_ARCHITECTURE_ARCH}.zip \
+ && mv terraform /usr/local/bin/ \
+ && terraform -install-autocomplete
+
+
+COPY scaffolding/scripts /scripts
+COPY scaffolding/templates /templates
+
+ENTRYPOINT ["bash", "/scripts/entrypoint.sh"]
diff --git a/tools/README.md b/tools/README.md
new file mode 100644
index 00000000..aa9b6218
--- /dev/null
+++ b/tools/README.md
@@ -0,0 +1,13 @@
+Build the scaffolding image:
+
+```bash
+cd /dfds/aws-modules-rds/tools
+docker build -t scaffold .
+```
+
+Create an output folder and run the container:
+
+```bash
+mkdir auto-generated
+docker run -v /aws-modules-rds/:/input -v /aws-modules-rds/tools/auto-generated:/output scaffold:latest
+```
diff --git a/tools/scaffolding/scripts/entrypoint.sh b/tools/scaffolding/scripts/entrypoint.sh
new file mode 100755
index 00000000..a70183ba
--- /dev/null
+++ b/tools/scaffolding/scripts/entrypoint.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+scripts_path="/scripts"
+source_module_path="/input"
+
+if [ ! -d "/output" ]; then
+ echo "output folder does not exist"
+ exit 1
+fi
+
+# TERRAFORM DOCS
+output_json_file="/tmp/doc.json"
+
+# TERRAFORM
+source_json_doc=$output_json_file
+generated_tf_module_data="/tmp/tf_module.json"
+tf_module_template="/templates/main.tf.template"
+tf_module_output="/output/terraform/module.tf"
+tf_output_folders="/output/terraform"
+mkdir -p $tf_output_folders
+
+# DOCKER
+docker_compose_template="/templates/compose.yml.template"
+docker_compose_output="/output/docker/compose.yml"
+docker_env_template="/templates/.env.template"
+docker_env_output="/output/docker/.env"
+docker_script_template="/templates/restore.sh.template"
+docker_script_output="/output/docker/restore.sh"
+docker_output_folders="/output/docker"
+
+mkdir -p $docker_output_folders
+
+if [ -z "$(ls -A $source_module_path)" ]; then
+ echo "empty $source_module_path"
+ exit 1
+fi
+
+
+# TODO: CHECK FOR output folder mount
+
+# 1) Generate docs for all modules in a repo
+terraform-docs json --show "all" $source_module_path --output-file $output_json_file
+
+# 2) Generate TF files
+python3 $scripts_path/generate_tf_module.py --source-tf-doc $source_json_doc --temp-work-folder $generated_tf_module_data --tf-module-template $tf_module_template --tf-output-path $tf_module_output
+
+# 3) Format TF files
+terraform fmt $tf_output_folders
+
+# 4) Generate Docker files
+python3 $scripts_path/generate_docker.py --docker-compose-template $docker_compose_template --docker-compose-output $docker_compose_output --env-template $docker_env_template --env-output $docker_env_output --docker-script-template $docker_script_template --docker-script-output $docker_script_output
+# 5) Generate pipeline files
+# TODO: generate pipeline
diff --git a/tools/scaffolding/scripts/generate_docker.py b/tools/scaffolding/scripts/generate_docker.py
new file mode 100644
index 00000000..59a0b3f5
--- /dev/null
+++ b/tools/scaffolding/scripts/generate_docker.py
@@ -0,0 +1,42 @@
+"""This scripts generates boiler plates for using docker compose files. Input: Template files, Output: Docker compose files."""
+from string import Template
+import shutil
+import argparse
+
+parser = argparse.ArgumentParser(
+ prog='Docker Compose Generator',
+    description='This script generates boilerplate Docker Compose files. Input: template files. Output: Docker Compose files.',
+ epilog='.')
+parser.add_argument('--docker-compose-template', type=str, required=True, help='The template file for the docker compose.')
+parser.add_argument('--docker-compose-output', type=str, required=True, help='The output path for the docker compose.')
+parser.add_argument('--env-template', type=str, required=True, help='The template file for the env file.')
+parser.add_argument('--env-output', type=str, required=True, help='The output path for the env file.')
+parser.add_argument('--docker-script-template', type=str, required=True, help='The template file for the script that is used by the generated docker compose file.')
+parser.add_argument('--docker-script-output', type=str, required=True, help='The output path for the script that is used by the generated docker compose file.')
+args = parser.parse_args()
+
+docker_template = args.docker_compose_template
+output_docker = args.docker_compose_output
+env_template = args.env_template
+output_env = args.env_output
+docker_script_template = args.docker_script_template
+output_docker_script = args.docker_script_output
+
+vars_sub = {
+ 'pgpassword': 'example',
+ 'pgdatabase': 'example',
+ 'pghost': 'example',
+ 'pgport': 'example',
+ 'pguser': 'example'
+}
+
+with open(env_template, 'r', encoding='UTF-8') as f:
+ src = Template(f.read())
+ result = src.substitute(vars_sub)
+
+with open(output_env, "w", encoding='UTF-8') as f:
+ f.write(result)
+
+shutil.copy(docker_template, output_docker)
+
+shutil.copy(docker_script_template, output_docker_script)
diff --git a/tools/scaffolding/scripts/generate_tf_module.py b/tools/scaffolding/scripts/generate_tf_module.py
new file mode 100644
index 00000000..56b428c9
--- /dev/null
+++ b/tools/scaffolding/scripts/generate_tf_module.py
@@ -0,0 +1,76 @@
+"""This scripts generates boiler plates for using Terraform files. Input: Template files, Output: Terraform module."""
+import json
+from string import Template
+import re
+
+import argparse
+
+parser = argparse.ArgumentParser(
+ prog='Terraform Module Generator',
+    description='This script generates a boilerplate Terraform module. Input: template files. Output: Terraform module.',
+ epilog='.')
+parser.add_argument('--source-tf-doc', type=str, required=True, help='The json file generated by terraform-docs tool.')
+parser.add_argument('--temp-work-folder', type=str, required=True, help='The temporary folder to store the intermediate files.')
+parser.add_argument('--tf-module-template', type=str, required=True, help='The template file for the terraform module.')
+parser.add_argument('--tf-output-path', type=str, required=True, help='The output path for the terraform module.')
+args = parser.parse_args()
+
+source_doc = args.source_tf_doc
+work_folder = args.temp_work_folder
+tf_template = args.tf_module_template
+output_folder = args.tf_output_path
+
+with open(source_doc, "r", encoding='UTF-8') as f:
+ lines = f.readlines()
+
+with open(work_folder, "w", encoding='UTF-8') as f:
+ for line in lines:
+        if line.strip("\n") != "":
+ f.write(line)
+input_list = []
+output_list = []
+OUTPUT_TEMPLATE = """output "$out_name" {
+ description = "$output_description"
+  value = try(module.db_instance.$out_value, null)
+}"""
+
+with open(work_folder, "r", encoding='UTF-8') as f:
+ data = json.load(f)
+ for i in data['inputs']:
+ if i['name'].startswith('is_'):
+ extracted_feature = re.search('(?<=is_)(.*?)(?=_|$)', i['name'])
+ if extracted_feature:
+ desc = i['description']
+ input_list.append("")
+ for line in desc.splitlines():
+ input_list.append('# ' + line)
+ feature = extracted_feature.group(0)
+ if i['required'] is False:
+ if i['type'] == 'bool':
+ param_val = i['default']
+ input_list.append(i['name'] + ' = ' + str(param_val).lower())
+ elif i['required'] is True:
+ desc = i['description']
+ input_list.append("")
+ for line in desc.splitlines():
+ input_list.append('# ' + line)
+ input_list.append(i['name'] + ' = "example"')
+ for y in data['outputs']:
+ output_sub = {
+ 'out_name': y['name'],
+ 'output_description': y['description'],
+ 'out_value': y['name'],
+ }
+ output_list.append(Template(OUTPUT_TEMPLATE).substitute(output_sub))
+
+vars_sub = {
+ 'inputs': '\n'.join(input_list),
+ 'outputs': '\n'.join(output_list),
+}
+
+with open(tf_template, 'r', encoding='UTF-8') as f:
+ src = Template(f.read())
+ result = src.substitute(vars_sub)
+
+with open(output_folder, "w", encoding='UTF-8') as f:
+ f.write(result)
diff --git a/tools/scaffolding/templates/.env.template b/tools/scaffolding/templates/.env.template
new file mode 100644
index 00000000..d094f168
--- /dev/null
+++ b/tools/scaffolding/templates/.env.template
@@ -0,0 +1,5 @@
+PGPASSWORD=$pgpassword
+PGDATABASE=$pgdatabase
+PGHOST=$pghost
+PGPORT=$pgport
+PGUSER=$pguser
\ No newline at end of file
diff --git a/tools/scaffolding/templates/compose.yml.template b/tools/scaffolding/templates/compose.yml.template
new file mode 100644
index 00000000..585e15e0
--- /dev/null
+++ b/tools/scaffolding/templates/compose.yml.template
@@ -0,0 +1,32 @@
+services:
+ export-data:
+ healthcheck:
+ test: exit 0
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ image: postgres:15.4
+    container_name: export-data
+ volumes:
+ - ./postgres/export:/export
+ command: bash -c "cd /export && pg_dump -Fc -b -v -f dump.sql"
+ env_file:
+ - .env
+
+ database:
+ depends_on:
+ export-data:
+ condition: service_completed_successfully
+ image: postgres:15.4
+ restart: always
+ container_name: dev-db
+ ports:
+ - "5433:5432"
+ volumes:
+ # - ./postgres/data:/var/lib/postgresql/data
+ - ./postgres/export/dump.sql:/dump.sql
+ - ./restore.sh:/docker-entrypoint-initdb.d/restore.sh
+ environment:
+ POSTGRES_USER: 'postgres'
+ POSTGRES_PASSWORD: 'postgres'
+ POSTGRES_DB: 'dev_database'
diff --git a/tools/scaffolding/templates/main.tf.template b/tools/scaffolding/templates/main.tf.template
new file mode 100644
index 00000000..f8f8e62b
--- /dev/null
+++ b/tools/scaffolding/templates/main.tf.template
@@ -0,0 +1,22 @@
+terraform {
+ backend "s3" {
+ bucket = "-state-bucket"
+ encrypt = true
+ key = "/terraform.tfstate" # This is the path to the state file inside the bucket. You can change it to whatever you want.
+ region = "eu-central-1"
+ dynamodb_table = "terraform-locks"
+ }
+}
+
+
+provider "aws" {
+ region = "eu-central-1"
+}
+
+
+module "db_instance" {
+ source = "git::https://github.com/dfds/aws-modules-rds.git?ref="
+$inputs
+}
+
+$outputs
\ No newline at end of file
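For context, generate_tf_module.py fills `$inputs` with one commented assignment per documented variable and renders `$outputs` through `OUTPUT_TEMPLATE`. A hypothetical rendered fragment might look like the sketch below; the output name "db_instance_address" is invented for illustration, while the input names come from this module's variables.

```hcl
module "db_instance" {
  source = "git::https://github.com/dfds/aws-modules-rds.git?ref=<version>" # ref is filled in by hand

  # Specify the name of the RDS instance to create.
  identifier = "example"

  # Specify whether or not this instance is publicly accessible.
  is_publicly_accessible = false
}

# Rendered from OUTPUT_TEMPLATE; the output name is assumed, not taken from this diff
output "db_instance_address" {
  description = "The address of the RDS instance"
  value       = try(module.db_instance.db_instance_address, null)
}
```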
diff --git a/tools/scaffolding/templates/pipeline.yml.template b/tools/scaffolding/templates/pipeline.yml.template
new file mode 100644
index 00000000..5376d39f
--- /dev/null
+++ b/tools/scaffolding/templates/pipeline.yml.template
@@ -0,0 +1,4 @@
+on:
+ push:
+ branches:
+ - main
diff --git a/tools/scaffolding/templates/restore.sh.template b/tools/scaffolding/templates/restore.sh.template
new file mode 100755
index 00000000..b18f4a72
--- /dev/null
+++ b/tools/scaffolding/templates/restore.sh.template
@@ -0,0 +1 @@
+pg_restore -v -U $POSTGRES_USER -d $POSTGRES_DB -j 2 dump.sql
\ No newline at end of file
diff --git a/variables.tf b/variables.tf
index cd04dc27..86309a89 100644
--- a/variables.tf
+++ b/variables.tf
@@ -4,10 +4,6 @@
# Instance specific variables - applicable to cluster instances as well
################################################################################
-variable "is_instance" { # TODO: Remove this variable if not used
- default = true
-}
-
variable "environment" {
description = <