Skip to content

Commit

Permalink
Merge branch 'main' into fix/issue_276_and_add_pattern
Browse files Browse the repository at this point in the history
  • Loading branch information
dmaresma committed Aug 9, 2024
2 parents 3c51743 + 363d27a commit 9e4a9c0
Show file tree
Hide file tree
Showing 13 changed files with 203 additions and 126 deletions.
5 changes: 4 additions & 1 deletion CHANGELOG.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,13 @@
2.

**v1.5.2**
### Improvements
#### MySQL
1. Added support for COLLATE - https://github.com/xnuinside/simple-ddl-parser/pull/266/files

### Fixes

1. Fixed unexpected behaviour in Snowflake when a file_format name is given - https://github.com/xnuinside/simple-ddl-parser/issues/273
2.

**v1.5.1**
### Improvements
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ There is a lot of way how you can contribute to any project (not only in this),

- Add more tests to the code

It's always needed, I have only functional tests right now, so if you want to help wiht covering library for example, with unittests - please welcome, open the PR.
It's always needed, I have only functional tests right now, so if you want to help with covering library for example, with unittests - please welcome, open the PR.

Any type of contribution is really, really appreciated. Each of them is important.

Expand Down
12 changes: 8 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -489,14 +489,18 @@ for help with debugging & testing support for BigQuery dialect DDLs:
* https://github.com/kalyan939

## Changelog

**v1.5.2**

**v1.5.3**
### Fixes

1. Fix Snowflake unexpected behaviour when file_format name given - https://github.com/xnuinside/simple-ddl-parser/issues/273
1. Fixed unexpected behaviour in Snowflake when a file_format name is given - https://github.com/xnuinside/simple-ddl-parser/issues/273
2.

**v1.5.2**
### Improvements
#### MySQL
1. Added support for COLLATE - https://github.com/xnuinside/simple-ddl-parser/pull/266/files


**v1.5.1**
### Improvements
#### MySQL
Expand Down
26 changes: 22 additions & 4 deletions docs/README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -96,10 +96,8 @@ How to use
Extract additional information from HQL (& other dialects)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In some dialects like HQL there is a lot of additional information about table like, fore example, is it external table,
STORED AS, location & etc. This property will be always empty in 'classic' SQL DB like PostgreSQL or MySQL
and this is the reason, why by default this information is 'hidden'.
Also some fields are hidden in HQL, because they are simple not exists in HIVE, for example 'deferrable_initially'
In some dialects like HQL there is a lot of additional information about a table — for example, whether it is an external table, STORED AS, location, etc. This property will always be empty in 'classic' SQL DBs like PostgreSQL or MySQL, and this is the reason why, by default, this information is 'hidden'.
Also, some fields are hidden in HQL because they simply do not exist in HIVE, for example 'deferrable_initially'.
To get this 'hql' specific details about table in output please use 'output_mode' argument in run() method.

example:
Expand Down Expand Up @@ -557,6 +555,26 @@ for help with debugging & testing support for BigQuery dialect DDLs:
Changelog
---------

**v1.5.3**

Fixes
^^^^^


#. Fixed unexpected behaviour in Snowflake when a file_format name is given - https://github.com/xnuinside/simple-ddl-parser/issues/273
2.

**v1.5.2**

Improvements
^^^^^^^^^^^^

MySQL
~~~~~


#. Added support for COLLATE - https://github.com/xnuinside/simple-ddl-parser/pull/266/files

**v1.5.1**

Improvements
Expand Down
15 changes: 13 additions & 2 deletions simple_ddl_parser/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,17 @@
from simple_ddl_parser.ddl_parser import DDLParser, DDLParserError, parse_from_file
from simple_ddl_parser.ddl_parser import (
DDLParser,
DDLParserError,
SimpleDDLParserException,
parse_from_file,
)
from simple_ddl_parser.output.dialects import dialect_by_name

supported_dialects = dialect_by_name

__all__ = ["DDLParser", "parse_from_file", "DDLParserError", "supported_dialects"]
__all__ = [
"DDLParser",
"parse_from_file",
"DDLParserError",
"supported_dialects",
"SimpleDDLParserException",
]
8 changes: 6 additions & 2 deletions simple_ddl_parser/ddl_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,15 @@
Snowflake,
SparkSQL,
)
# "DDLParserError" is an alias for backward compatibility
from simple_ddl_parser.exception import SimpleDDLParserException as DDLParserError
from simple_ddl_parser.exception import SimpleDDLParserException
from simple_ddl_parser.parser import Parser


# "DDLParserError" is an alias for backward compatibility
class DDLParserError(SimpleDDLParserException):
pass


class Dialects(
SparkSQL,
Snowflake,
Expand Down
19 changes: 10 additions & 9 deletions simple_ddl_parser/parsetab.py

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions simple_ddl_parser/utils.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import re
from typing import List, Tuple, Optional, Union, Any
from typing import Any, List, Optional, Tuple, Union

# Backward compatibility import
from simple_ddl_parser.exception import SimpleDDLParserException
Expand All @@ -10,10 +10,10 @@
"find_first_unpair_closed_par",
"normalize_name",
"get_table_id",
"SimpleDDLParserException"
"SimpleDDLParserException",
]

_parentheses = ('(', ')')
_parentheses = ("(", ")")


def remove_par(p_list: List[Union[str, Any]]) -> List[Union[str, Any]]:
Expand Down Expand Up @@ -59,9 +59,9 @@ def find_first_unpair_closed_par(str_: str) -> Optional[int]:
"""
count_open = 0
for i, char in enumerate(str_):
if char == '(':
if char == "(":
count_open += 1
if char == ')':
if char == ")":
count_open -= 1
if count_open < 0:
return i
Expand Down
3 changes: 1 addition & 2 deletions tests/dialects/test_mysql.py
Original file line number Diff line number Diff line change
Expand Up @@ -627,8 +627,7 @@ def test_table_properties():
"schema": None,
"table_name": "`posts`",
"tablespace": None,
"table_properties": {"collate": "utf8mb4_unicode_ci"}
"table_properties": {"collate": "utf8mb4_unicode_ci"},
}
]
assert result == expected

3 changes: 2 additions & 1 deletion tests/dialects/test_snowflake.py
Original file line number Diff line number Diff line change
Expand Up @@ -1114,6 +1114,7 @@ def test_external_table_with_nullif():

assert result == expected


def test_external_table_file_format_without_parenthesis():
ddl = """create or replace external table if not exists ${database_name}.MySchemaName.MyTableName(
"Filename" VARCHAR(16777216) AS (METADATA$FILENAME))
Expand Down Expand Up @@ -1154,7 +1155,7 @@ def test_external_table_file_format_without_parenthesis():
"table_properties": {
"project": "${database_name}",
"auto_refresh": False,
"file_format":"MyFormatName",
"file_format": "MyFormatName",
},
"replace": True,
"location": "@ADL_DH_DL_PTS/",
Expand Down
85 changes: 52 additions & 33 deletions tests/test_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,39 +179,58 @@ def test_check_function_with_schema():

result = DDLParser(ddl).run(group_by_type=True)
expected = {
'tables': [
{'alter': {},
'checks': [{'constraint_name': 'my_constraint', 'statement': 'v2.my_function(name) IS TRUE'}],
'columns': [{'check': None,
'default': 'public.getId()',
'name': 'entity_id',
'nullable': False,
'references': None,
'size': None,
'type': 'UUID',
'unique': False},
{'check': None,
'default': None,
'name': 'name',
'nullable': True,
'references': None,
'size': None,
'type': 'TEXT',
'unique': False}],
'constraints': {'checks': [{'constraint_name': 'my_constraint',
'statement': 'v2.my_function(name) IS '
'TRUE'}]},
'index': [],
'partitioned_by': [],
'primary_key': ['entity_id'],
'schema': None,
'table_name': 'foo',
'tablespace': None}],
'types': [],
'ddl_properties': [],
'domains': [],
'schemas': [],
'sequences': [],
"tables": [
{
"alter": {},
"checks": [
{
"constraint_name": "my_constraint",
"statement": "v2.my_function(name) IS TRUE",
}
],
"columns": [
{
"check": None,
"default": "public.getId()",
"name": "entity_id",
"nullable": False,
"references": None,
"size": None,
"type": "UUID",
"unique": False,
},
{
"check": None,
"default": None,
"name": "name",
"nullable": True,
"references": None,
"size": None,
"type": "TEXT",
"unique": False,
},
],
"constraints": {
"checks": [
{
"constraint_name": "my_constraint",
"statement": "v2.my_function(name) IS " "TRUE",
}
]
},
"index": [],
"partitioned_by": [],
"primary_key": ["entity_id"],
"schema": None,
"table_name": "foo",
"tablespace": None,
}
],
"types": [],
"ddl_properties": [],
"domains": [],
"schemas": [],
"sequences": [],
}

assert result == expected
Loading

0 comments on commit 9e4a9c0

Please sign in to comment.