diff --git a/.github/workflows/sync-and-process-files.yml b/.github/workflows/sync-and-process-files.yml
new file mode 100644
index 00000000000..ec3afc0fae6
--- /dev/null
+++ b/.github/workflows/sync-and-process-files.yml
@@ -0,0 +1,67 @@
+name: sync and process files from another repo
+on:
+  repository_dispatch:
+    types: [sync_files]
+jobs:
+  sync-and-process-files:
+    env:
+      # The body text of the PRs that will be created
+      BODY: "Automated changes to pull in and process updates from repo: ${{ github.event.client_payload.repo }} ref: ${{ github.event.client_payload.ref }}"
+
+      # The name of the branch that will be created
+      BRANCH_NAME: automatic_docs_update/repo_${{ github.event.client_payload.repo }}/ref_${{ github.event.client_payload.ref }}
+
+      # The users that will be requested to review the PR, as a comma-separated list of GitHub usernames
+      REVIEWERS:
+
+      # The title of the PRs that will be created
+      TITLE: "Process changes to docs from: repo: ${{ github.event.client_payload.repo }} ref: ${{ github.event.client_payload.ref }}"
+
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout destination
+        uses: actions/checkout@v2
+        with:
+          path: destination
+
+      - name: Checkout source repo
+        uses: actions/checkout@v2
+        with:
+          ref: ${{ github.event.client_payload.sha }}
+          repository: ${{ github.event.client_payload.repo }}
+          token: ${{ secrets.SYNC_FILES_TOKEN }}
+          path: source
+
+      - name: setup node
+        uses: actions/setup-node@v2
+        with:
+          node-version: '14'
+
+      - name: Process changes
+        run: |
+          case ${{ github.event.client_payload.repo }} in
+
+            EnterpriseDB/cloud-native-postgresql)
+              ${{ github.workspace }}/destination/scripts/source/process-cnp-docs.sh ${{ github.workspace }}/source ${{ github.workspace }}/destination
+              ;;
+
+            EnterpriseDB/fe)
+              mkdir -p ${{ github.workspace }}/destination/icons-pkg && \
+              cp -fr utils/icons-placeholder/output/* ${{ github.workspace }}/destination/icons-pkg/
+              ;;
+
+            *)
+              echo "The workflow has not been configured for the ${{ github.event.client_payload.repo }} repo"
+              exit 1
+              ;;
+          esac
+        working-directory: source
+
+      - name: Create pull request
+        uses: peter-evans/create-pull-request@v3.10.0
+        with:
+          body: ${{ env.BODY }}
+          branch: ${{ env.BRANCH_NAME }}
+          path: destination/
+          reviewers: ${{ env.REVIEWERS }}
+          title: ${{ env.TITLE }}
diff --git a/.github/workflows/sync-files.yml b/.github/workflows/sync-files.yml
deleted file mode 100644
index 4e19cc86354..00000000000
--- a/.github/workflows/sync-files.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-name: sync files from another repo
-on:
-  repository_dispatch:
-    types: [sync_files]
-jobs:
-  sync-files:
-    env:
-      # The body text of the PR requests that will be created
-      PR_BODY: Automated changes to pull in updates from ${{ github.event.client_payload.repo }} repo
-
-      # The name of the branch that will be created
-      PR_BRANCH_NAME: ${{ github.workflow }}/${{ github.event.client_payload.repo }}
-
-      # The title of the PR request that will be created
-      PR_TITLE: Automated changes from ${{ github.event.client_payload.repo }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout destination
-        uses: actions/checkout@v2
-        with:
-          path: destination
-
-      - name: Checkout source repo
-        uses: actions/checkout@v2
-        with:
-          repository: ${{ github.event.client_payload.repo }}
-          ref: feat/icons-placeholder
-          token: ${{ secrets.GH_TOKEN }}
-          path: source
-
-      - name: Process changes
-        run: |
-          case ${{ github.event.client_payload.repo }} in
-
-            EnterpriseDB/fe)
-              cp -f utils/icons-placeholder/output/* ${{ github.workspace }}/icons-pkg/
-              ;;
-
-            *)
-              echo "The workflow has not been configured for the ${{ github.event.client_payload.repo }} repo"
-              exit 1
-              ;;
-          esac
-        working-directory: source
-
-      - name: Create pull request
-        uses: peter-evans/create-pull-request@v3.10.0
-        with:
-          body: ${{ env.PR_BODY }}
-          branch: ${{ env.PR_BRANCH_NAME }}
-          path: destination/
-          title: ${{ env.PR_TITLE }}
diff --git a/docs/how-tos/sync-cnp-docs.md b/docs/how-tos/sync-cnp-docs.md
index d03fe730119..ea4772cf31e 100644
--- a/docs/how-tos/sync-cnp-docs.md
+++ b/docs/how-tos/sync-cnp-docs.md
@@ -1,23 +1,15 @@
 # Sync Cloud-Native-PostgreSQL Docs
 
-Currently we need to manually sync over [cloud-native-postgresql][cnp]("CNP")
-docs whenever there's a new release. The long term goal is to automate this via
-GitHub action dispatch and automated event handling.
+Documentation from [cloud-native-postgresql][cnp] ("CNP") is normally synced over automatically when there is a new release. However, if the sync needs to be done manually, use the following process:
 
-1. The CNP team informs us that there's a new version.
 1. Check out the appropriate version from the [CNP][] repo.
-1. Replace `docs:temp_kubernetes/docs/` with `cloud-native-postgresql:docs/`.
-
-   `temp_kubernetes/docs/` is not tracked via Git, so if it's not present
-   already, you'll need to create the directory yourself.
-
-1. Transpile original source documentation into MDX format:
+1. Run the processor script.
+   **Note:** replace `path/to/cnp/checkout` below with the actual path of your CNP checkout. If you are not running the script from this project's root, also replace `.` below with the path to this project's checkout.
 
    ```sh
-   python scripts/source/source_cloud_native_operator.py
+   scripts/source/process-cnp-docs.sh path/to/cnp/checkout .
    ```
+1. The script handles updating and moving the files from the [CNP][] repo into place.
 
-1. Replace `advocacy_docs/kubernetes/cloud-native-postgresql/` with
-   `temp_kubernetes/build/`.
 
 [cnp]: https://github.com/EnterpriseDB/cloud-native-postgresql
diff --git a/product_docs/docs/eprs/6.2/02_overview/02_replication_concepts_and_definitions/13_table_filters.mdx b/product_docs/docs/eprs/6.2/02_overview/02_replication_concepts_and_definitions/13_table_filters.mdx
index d236d1bcb46..72ae973966a 100644
--- a/product_docs/docs/eprs/6.2/02_overview/02_replication_concepts_and_definitions/13_table_filters.mdx
+++ b/product_docs/docs/eprs/6.2/02_overview/02_replication_concepts_and_definitions/13_table_filters.mdx
@@ -106,6 +106,21 @@ The `REPLICA IDENTITY FULL` setting on a source table ensures that certain types
 Table filters are not supported on binary data type columns. A binary data type is the Postgres data type `BYTEA`. In addition, table filters are not supported on Advanced Server columns with data types `BINARY`, `VARBINARY`, `BLOB`, `LONG RAW`, and `RAW` as these are alias names for the `BYTEA` data type.
 
+**Filtering Restrictions on Operators**
+
+XDB supports the modulus operator (denoted by `%`) to define a filter clause. However, the following restrictions apply:
+
+- You can have only a single filter condition using the modulus operator.
+
+- You cannot combine it with any other conditions using the `AND` or `OR` operators.
+
+XDB supports the modulus filter in the following formats:
+
+`deptno%3=0`
+
+`@deptno%3=0`
+
+
 ## Roadmap for Further Instructions
 
 The specific details on implementing table filtering depend upon whether you are using a single-master replication system or a multi-master replication system. The following is a roadmap to the relevant sections for each type of replication system.
diff --git a/product_docs/docs/eprs/6.2/02_overview/04_design_replication_system/03_restrictions_on_replicated_database_objects.mdx b/product_docs/docs/eprs/6.2/02_overview/04_design_replication_system/03_restrictions_on_replicated_database_objects.mdx
index 55fe9124a25..757cb0a2085 100644
--- a/product_docs/docs/eprs/6.2/02_overview/04_design_replication_system/03_restrictions_on_replicated_database_objects.mdx
+++ b/product_docs/docs/eprs/6.2/02_overview/04_design_replication_system/03_restrictions_on_replicated_database_objects.mdx
@@ -78,6 +78,12 @@ For replicating Postgres partitioned tables see [Replicating Postgres Partitione
 - `BYTEA`
 - `RAW`
 
+PostgreSQL or EDB Postgres Advanced Server database tables that include the following data types cannot be replicated to the Oracle database:
+
+- `JSON`
+- `JSONB`
+
+
 Postgres tables that include `OID` based large objects cannot be replicated. For information on `OID` based large objects see `pg_largeobject` in the PostgreSQL Core Documentation located at:
 
 >
diff --git a/scripts/fileProcessor/.prettierignore b/scripts/fileProcessor/.prettierignore
new file mode 100644
index 00000000000..dd449725e18
--- /dev/null
+++ b/scripts/fileProcessor/.prettierignore
@@ -0,0 +1 @@
+*.md
diff --git a/scripts/fileProcessor/.prettierrc.json b/scripts/fileProcessor/.prettierrc.json
new file mode 100644
index 00000000000..bf357fbbc08
--- /dev/null
+++ b/scripts/fileProcessor/.prettierrc.json
@@ -0,0 +1,3 @@
+{
+  "trailingComma": "all"
+}
diff --git a/scripts/fileProcessor/README.md b/scripts/fileProcessor/README.md
new file mode 100644
index 00000000000..1a4880c8473
--- /dev/null
+++ b/scripts/fileProcessor/README.md
@@ -0,0 +1,22 @@
+# File Processor
+
+This tool automatically modifies files by opening the specified files and applying one or more processor scripts to each file's name and content. It is intended to be used by workflows which pull content from other repositories into this one.
+
+## Usage
+
+In the directory whose files you'd like to modify, run something like the following:
+```
+node fileProcessor/main.mjs -f "**/*.md" -p example
+```
+
+### Options
+| flag | alias | description |
+|---------------|-------|-------------|
+| `--files` | `-f` | The glob the script uses to look for files. More than one `--files` flag can be passed in, but the processors will only run on files which match all of the globs passed in. |
+| `--processor` | `-p` | The processor to apply to the files. The script will look for these in the `processors` directory. More than one processor can be added, and they will be run in the order they are passed in. |
+
+## Adding new processors
+
+The main script will attempt to import processors passed in with `--processor` flags by looking for a file with a matching name in the `processors` directory.
+
+A processor should be saved with the `.mjs` extension, and export a function named `process` which accepts two arguments. The file name is passed as the first argument and the file contents as the second. This function should return an object with the keys `newContent` and `newFilename`.
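For illustration, a minimal processor that satisfies this contract might look like the sketch below. The file name `processors/append-generated-note.mjs` and the transformation it performs are hypothetical, not part of this change.

```js
// processors/append-generated-note.mjs -- hypothetical example of the contract main.mjs expects:
// receive the file name and content, return { newFilename, newContent }.
export const process = (filename, content) => {
  return {
    // rename *.md files to *.mdx, leaving other names untouched
    newFilename: filename.replace(/\.md$/, ".mdx"),
    // append a marker so readers know the file is generated
    newContent: content + "\n<!-- Generated file: edit the source repository instead. -->\n",
  };
};
```

Assuming it were saved under `processors/`, it could be run with `node fileProcessor/main.mjs -f "**/*.md" -p append-generated-note`.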
diff --git a/scripts/fileProcessor/main.mjs b/scripts/fileProcessor/main.mjs
new file mode 100644
index 00000000000..a802b8bd386
--- /dev/null
+++ b/scripts/fileProcessor/main.mjs
@@ -0,0 +1,76 @@
+import arg from "arg";
+import fs from "fs/promises";
+import { globby } from "globby";
+import { dirname } from "path";
+import { fileURLToPath } from "url";
+
+const args = arg({
+  "--files": [String],
+  "--processor": [String],
+
+  "-f": "--files",
+  "-p": "--processor",
+});
+
+const __dirname = dirname(fileURLToPath(import.meta.url));
+
+const processFiles = async () => {
+  const paths = await globby(args["--files"]);
+
+  console.log(`Processing ${paths.length} files`);
+
+  paths.forEach(processSingleFile);
+};
+
+const processSingleFile = async (filename) => {
+  console.log(`Processing ${filename}`);
+
+  // run the processor scripts
+  const { newFilename, newContent } = await runProcessorsForFile(
+    filename,
+    await fs.readFile(filename, "utf8"),
+  );
+
+  if (newFilename != filename) {
+    console.log(`Writing ${newFilename} (previously ${filename})`);
+  } else {
+    console.log(`Writing ${newFilename}`);
+  }
+
+  fs.writeFile(newFilename, newContent)
+    .catch((err) => {
+      console.error(err);
+      process.exit(1);
+    })
+    .then(() => {
+      // if the filename has changed, then remove the old one
+      if (newFilename != filename) {
+        console.log(`Removing ${filename}`);
+
+        fs.rm(filename).catch((err) => {
+          console.error(err);
+          process.exit(1);
+        });
+      }
+    });
+};
+
+const runProcessorsForFile = async (filename, content) => {
+  let newFilename = filename;
+  let newContent = content;
+
+  for (const index in args["--processor"]) {
+    await import(
+      `${__dirname}/processors/${args["--processor"][index]}.mjs`
+    ).then(async (module) => {
+      const output = await module.process(newFilename, newContent);
+
+      newFilename = output.newFilename;
+      newContent = output.newContent;
+    });
+  }
+
+  return { newFilename, newContent };
+};
+
+processFiles();
diff --git a/scripts/fileProcessor/package.json b/scripts/fileProcessor/package.json
new file mode 100644
index 00000000000..11dc124e8f2
--- /dev/null
+++ b/scripts/fileProcessor/package.json
@@ -0,0 +1,20 @@
+{
+  "name": "fileprocessor",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "arg": "^5.0.1",
+    "globby": "^12.0.1",
+    "js-yaml": "^4.1.0"
+  },
+  "devDependencies": {
+    "prettier": "^2.3.2"
+  }
+}
diff --git a/scripts/fileProcessor/processors/cnp/add-frontmatters.mjs b/scripts/fileProcessor/processors/cnp/add-frontmatters.mjs
new file mode 100644
index 00000000000..05ad6d07a2c
--- /dev/null
+++ b/scripts/fileProcessor/processors/cnp/add-frontmatters.mjs
@@ -0,0 +1,69 @@
+import fs from "fs/promises";
+import yaml from "js-yaml";
+
+export const process = async (filename, content) => {
+  const trimmedContent = content.trim();
+  if (trimmedContent.charAt(0) !== "#") {
+    console.warn(
+      "File does not begin with title - frontmatter will not be valid: " +
+        filename,
+    );
+  }
+
+  const endOfFirstLine = trimmedContent.indexOf("\n");
+
+  // Get the first line of content, which should be the header.
+  // This will exclude the very first character, which should be '#'
+  const header = trimmedContent.slice(1, endOfFirstLine).trim();
+
+  // add the frontmatter to the file. This will replace the first line of the file.
+  let newContent = await getFrontmatter(header, filename);
+  newContent = newContent + trimmedContent.slice(endOfFirstLine);
+
+  return {
+    newFilename: filename,
+    newContent,
+  };
+};
+
+const getFrontmatter = async (header, filename) => {
+  let frontmatter = `---
+title: '${header}'
+originalFilePath: '${filename}'
+product: 'Cloud Native Operator'
+`;
+
+  if (filename.slice(-8) === "index.md") {
+    frontmatter = await addIndexFrontmatterSection(frontmatter);
+  }
+
+  return frontmatter + "---";
+};
+
+const addIndexFrontmatterSection = async (frontmatter) => {
+  let modifiedFrontmatter =
+    frontmatter +
+    `indexCards: none
+directoryDefaults:
+  prevNext: true
+  iconName: kubernetes
+
+navigation:
+`;
+
+  // read the mkdocs.yml file to figure out the nav entries for the frontmatter
+  const mkdocsYaml = yaml.load(
+    await fs.readFile("mkdocs.yml", { encoding: "utf8" }),
+  );
+  mkdocsYaml.nav.forEach((line) => {
+    // make sure file extensions are stripped off.
+    modifiedFrontmatter = `${modifiedFrontmatter} - ${line.slice(0, -3)}\n`;
+
+    // Make sure the interactive demo page is included in the right spot.
+    if (line === "quickstart.md") {
+      modifiedFrontmatter = modifiedFrontmatter + " - interactive_demo\n";
+    }
+  });
+
+  return modifiedFrontmatter;
+};
diff --git a/scripts/fileProcessor/processors/cnp/add-quickstart-content.mjs b/scripts/fileProcessor/processors/cnp/add-quickstart-content.mjs
new file mode 100644
index 00000000000..b8c4040a2f2
--- /dev/null
+++ b/scripts/fileProcessor/processors/cnp/add-quickstart-content.mjs
@@ -0,0 +1,20 @@
+const additionalContent = `
+
+!!! Tip "Live demonstration"
+    Don't want to install anything locally just yet? Try a demonstration directly in your browser:
+
+    [Cloud Native PostgreSQL Operator Interactive Quickstart](interactive_demo)
+
+`;
+
+const paragraphDelimiter = "\n\n";
+
+export const process = (filename, content) => {
+  const paragraphs = content.split(paragraphDelimiter);
+  paragraphs.splice(2, 0, additionalContent);
+
+  return {
+    newFilename: filename,
+    newContent: paragraphs.join(paragraphDelimiter),
+  };
+};
diff --git a/scripts/fileProcessor/processors/cnp/rename-to-mdx.mjs b/scripts/fileProcessor/processors/cnp/rename-to-mdx.mjs
new file mode 100644
index 00000000000..f31d94d5552
--- /dev/null
+++ b/scripts/fileProcessor/processors/cnp/rename-to-mdx.mjs
@@ -0,0 +1,6 @@
+export const process = (filename, content) => {
+  return {
+    newFilename: filename.replace(/\.md$/, ".mdx"),
+    newContent: content,
+  };
+};
diff --git a/scripts/fileProcessor/processors/cnp/update-yaml-links.mjs b/scripts/fileProcessor/processors/cnp/update-yaml-links.mjs
new file mode 100644
index 00000000000..319f44af33f
--- /dev/null
+++ b/scripts/fileProcessor/processors/cnp/update-yaml-links.mjs
@@ -0,0 +1,18 @@
+export const process = (filename, content) => {
+  const newContent = content.split("\n").map(rewriteLink).join("\n");
+
+  return {
+    newFilename: filename,
+    newContent,
+  };
+};
+
+const rewriteLink = (line) => {
+  const regex = /\[.+\]\((.+)\.yaml\)/;
+  const match = line.match(regex);
+  if (match === null) {
+    return line;
+  }
+
+  return line.replace(match[1], match[1].replace("samples/", "../samples/"));
+};
diff --git a/scripts/fileProcessor/processors/example.mjs b/scripts/fileProcessor/processors/example.mjs
new file mode 100644
index 00000000000..dcc3a0f71e0
--- /dev/null
+++ b/scripts/fileProcessor/processors/example.mjs
@@ -0,0 +1,6 @@
+export const process = (filename, content) => {
+  return {
+    newFilename: filename,
+    newContent: content,
+  };
+};
diff --git a/scripts/source/process-cnp-docs.sh b/scripts/source/process-cnp-docs.sh
new file mode 100755
index 00000000000..87a6d2ba1b8
--- /dev/null
+++ b/scripts/source/process-cnp-docs.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+if [ -z $1 ] || [ -z $2 ]
+then
+  echo "the path to the source and destination checkouts must be provided"
+  exit 1
+fi
+
+# convert inputs to actual directory names, in case a relative path is passed in.
+SOURCE_CHECKOUT=`cd $1 && pwd`
+DESTINATION_CHECKOUT=`cd $2 && pwd`
+
+cd $DESTINATION_CHECKOUT/scripts/fileProcessor
+npm install --production
+
+cd $SOURCE_CHECKOUT/docs
+
+node $DESTINATION_CHECKOUT/scripts/fileProcessor/main.mjs \
+  -f "src/**/quickstart.md" \
+  -p cnp/add-quickstart-content
+
+node $DESTINATION_CHECKOUT/scripts/fileProcessor/main.mjs \
+  -f "src/**/*.md" \
+  -p "cnp/update-yaml-links" \
+  -p "cnp/add-frontmatters" \
+  -p "cnp/rename-to-mdx"
+
+cp -rf src/* $DESTINATION_CHECKOUT/advocacy_docs/kubernetes/cloud_native_postgresql/
diff --git a/scripts/source/source_cloud_native_operator.py b/scripts/source/source_cloud_native_operator.py
deleted file mode 100644
index f1c58c8449e..00000000000
--- a/scripts/source/source_cloud_native_operator.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import os
-import re
-from pathlib import Path
-
-STANDARD_FRONTMATTER = """---
-title: '{0}'
-originalFilePath: '{1}'
-product: 'Cloud Native Operator'{2}
----
-"""
-
-INDEX_FRONTMATTER = """
-indexCards: none
-directoryDefaults:
-  prevNext: true
-  iconName: kubernetes
-
-navigation:
-{0}"""
-
-
-def rewrite_yaml_links(line):
-    match = re.search(r"\[.+\]\((.+)\)", line)
-    if match and match[1] and match[1].endswith(".yaml"):
-        return line.replace(match[1], match[1].replace("samples/", "../samples/"))
-    return line
-
-
-def index_frontmatter():
-    nav = []
-    with open("temp_kubernetes/docs/mkdocs.yml") as mkdocs:
-        readingNav = False
-        for line in mkdocs:
-            if "-" not in line:
-                readingNav = False
-            if line.startswith("nav"):
-                readingNav = True
-            elif readingNav:
-                nav.append(line.replace(".md", ""))
-                if "quickstart.md" in line:
-                    nav.append(" - interactive_demo\n")
-
-    return INDEX_FRONTMATTER.format("".join(nav))
-
-
-def process_md(file_path):
-    new_file_path = file_path.with_suffix(".mdx")
-
-    with open(new_file_path, "w") as new_file:
-        with open(file_path, "r") as md_file:
-            copying = False
-            quickstart = file_path.name == "quickstart.md"
-            paragraph = 0
-            gh_relative_path = Path("src/") / file_path.relative_to("temp_kubernetes/build/")
-
-            for line in md_file:
-                if quickstart and not line.strip():
-                    paragraph = paragraph + 1
-
-                    if paragraph == 2:
-                        line = """
-
-
-!!! Tip "Live demonstration"
-    Don't want to install anything locally just yet? Try a demonstration directly in your browser:
-
-    [Cloud Native PostgreSQL Operator Interactive Quickstart](interactive_demo)
-
-
-"""
-                elif copying:
-                    line = rewrite_yaml_links(line)
-                elif line.startswith("#"):
-                    copying = True
-                    line = STANDARD_FRONTMATTER.format(
-                        re.sub(r"#+ ", "", line).strip(),
-                        gh_relative_path,
-                        index_frontmatter()
-                        if new_file_path.name == "index.mdx"
-                        else "",
-                    )
-                elif not line.strip():
-                    line = ""
-                else:
-                    print("File does not begin with title - frontmatter will not be valid: " + file_path)
-
-                new_file.write(line)
-
-    os.remove(file_path)
-
-
-def source_cloud_native_postgresql_docs():
-    os.system("rm -r temp_kubernetes/build")
-    os.system("cp -r temp_kubernetes/docs/src temp_kubernetes/build")
-    os.system(
-        "cp -r merge_sources/kubernetes/cloud_native_postgresql/* temp_kubernetes/build"
-    )
-
-    print("Processing cloud_native_postgresql...")
-    files = Path("temp_kubernetes/build/").glob("**/*.md")
-    for file_path in files:
-        process_md(file_path)
-
-
-if __name__ == "__main__":
-    source_cloud_native_postgresql_docs()
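For context, the `sync_files` event consumed by `sync-and-process-files.yml` above is a `repository_dispatch` event; a source repository could raise it with a sketch like the one below. The destination repository slug, token variable, and payload values are placeholder assumptions (the workflow only reads the `repo`, `ref`, and `sha` fields of `client_payload`), and the built-in `fetch` requires Node 18+.

```js
// trigger-sync.mjs -- hypothetical helper, not part of this change.
// Sends the repository_dispatch event that the sync-and-process-files workflow listens for.
const response = await fetch(
  "https://api.github.com/repos/OWNER/DESTINATION-REPO/dispatches", // placeholder destination repo
  {
    method: "POST",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `token ${process.env.GITHUB_TOKEN}`, // token with access to the destination repo
    },
    body: JSON.stringify({
      event_type: "sync_files", // matches the repository_dispatch type in the workflow
      client_payload: {
        repo: "EnterpriseDB/cloud-native-postgresql", // selected by the case statement in "Process changes"
        ref: "v1.9.0", // hypothetical tag; appears in the PR title, body, and branch name
        sha: "0123abcdef", // hypothetical commit; checked out by the "Checkout source repo" step
      },
    }),
  },
);

if (!response.ok) {
  throw new Error(`repository_dispatch failed: ${response.status}`);
}
```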