Skip to content

Commit

Permalink
new fixes for bunch of issues
Browse files Browse the repository at this point in the history
  • Loading branch information
xnuinside committed Oct 31, 2022
1 parent 8f7b3e6 commit 762fea7
Show file tree
Hide file tree
Showing 6 changed files with 227 additions and 4 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
**v0.28.1**
Improvements:
1. Lines starting with an INSERT INTO statement are now successfully ignored by the parser (so you can keep them in your DDL — they will just be skipped)

Fixes:
1. Important fix for multiline comments


**v0.28.0**

Important Changes (Pay attention):
Expand Down
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -484,6 +484,14 @@ https://github.com/swiatek25


## Changelog
**v0.28.1**
Improvements:
1. Lines starting with an INSERT INTO statement are now successfully ignored by the parser (so you can keep them in your DDL — they will just be skipped)

Fixes:
1. Important fix for multiline comments


**v0.28.0**

Important Changes (Pay attention):
Expand Down
11 changes: 11 additions & 0 deletions docs/README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -552,6 +552,17 @@ https://github.com/swiatek25
Changelog
---------

**v0.28.1**
Improvements:


#. Lines starting with an INSERT INTO statement are now successfully ignored by the parser (so you can keep them in your DDL — they will just be skipped)

Fixes:


#. Important fix for multiline comments

**v0.28.0**

Important Changes (Pay attention):
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "simple-ddl-parser"
version = "0.28.0"
version = "0.28.1"
description = "Simple DDL Parser to parse SQL & dialects like HQL, TSQL (MSSQL), Oracle, AWS Redshift, Snowflake, MySQL, PostgreSQL, etc ddl files to json/python dict with full information about columns: types, defaults, primary keys, etc.; sequences, alters, custom types & other entities from ddl."
authors = ["Iuliia Volkova <[email protected]>"]
license = "MIT"
Expand Down
9 changes: 6 additions & 3 deletions simple_ddl_parser/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,9 +95,12 @@ def __init__(
self.comments = []

def catch_comment_or_process_line(self, code_line: str) -> str:
if self.multi_line_comment and CL_COM not in self.line:
if self.multi_line_comment:
self.comments.append(self.line)
if CL_COM in self.line:
self.multi_line_comment = False
return ''

elif not (
self.line.strip().startswith(MYSQL_COM)
or self.line.strip().startswith(IN_COM)
Expand Down Expand Up @@ -218,7 +221,7 @@ def check_new_statement_start(self, line: str) -> bool:
return self.new_statement

def check_line_on_skip_words(self) -> bool:
skip_regex = r"^(GO|USE)\b"
skip_regex = r"^(GO|USE|INSERT)\b"

self.skip = False

Expand Down Expand Up @@ -272,7 +275,7 @@ def process_line(
final_line = self.line.endswith(";") and not self.set_was_in_line
self.add_line_to_statement()

if final_line or self.new_statement:
if (final_line or self.new_statement) and self.statement:
# end of sql operation, remove ; from end of line
self.statement = self.statement[:-1]
elif last_line and not self.skip:
Expand Down
193 changes: 193 additions & 0 deletions tests/test_simple_ddl_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -2897,3 +2897,196 @@ def test_floats():
'types': []}

assert expected == results


def test_fix_multiline_comments_not_joined_with_table():
    """Regression test: a ``/* ... */`` header comment placed before a
    CREATE TABLE must land in the top-level ``comments`` list and must not
    be merged into the parsed table definition.
    """
    ddl = """/************************
@Author: Azat Erol
Always happy coding!
************************/
CREATE TABLE Kunde (
KundenID INT PRIMARY KEY,
KundenName VARCHAR(40),
AbteilungID INT
FOREIGN KEY(AbteilungID) REFERENCES Abteilung(AbteilungID) ON DELETE SET NULL
);
"""
    parsed = DDLParser(ddl, normalize_names=True).run(group_by_type=True)

    # Expected output: comment lines captured verbatim (minus the opening
    # "/*" marker on the first line), plus one fully parsed table.
    expected = {
        "comments": [
            "***********************",
            "@Author: Azat Erol",
            "Always happy coding!",
            "************************/",
        ],
        "ddl_properties": [],
        "domains": [],
        "schemas": [],
        "sequences": [],
        "tables": [
            {
                "alter": {},
                "checks": [],
                "columns": [
                    {
                        "check": None,
                        "default": None,
                        "name": "KundenID",
                        "nullable": False,
                        "references": None,
                        "size": None,
                        "type": "INT",
                        "unique": False,
                    },
                    {
                        "check": None,
                        "default": None,
                        "name": "KundenName",
                        "nullable": True,
                        "references": None,
                        "size": 40,
                        "type": "VARCHAR",
                        "unique": False,
                    },
                    {
                        "check": None,
                        "default": None,
                        "name": "AbteilungID",
                        "nullable": True,
                        "references": {
                            "column": "AbteilungID",
                            "deferrable_initially": None,
                            "on_delete": "SET",
                            "on_update": None,
                            "schema": None,
                            "table": "Abteilung",
                        },
                        "size": None,
                        "type": "INT",
                        "unique": False,
                    },
                ],
                "index": [],
                "partitioned_by": [],
                "primary_key": ["KundenID"],
                "schema": None,
                "table_name": "Kunde",
                "tablespace": None,
            }
        ],
        "types": [],
    }
    assert parsed == expected


def test_inserts_skipped_validly():
    """Regression test: INSERT INTO statements in the input are skipped by
    the parser, and the CREATE TABLE that follows them is still parsed.
    """
    ddl = """
INSERT INTO "autofill_profiles" VALUES('Jim Johnson, 789 4th Street',1,'Jim','','Johnson','[email protected]','Acme Inc.','789 4th Street','Apt. #4','San Francisco','CA','94102','USA','4155512255','4155512233',1287508123);
INSERT INTO "autofill_profiles" VALUES('Billy Jean, 1 Ghost Blvd.',3,'Billy','','Jean','[email protected]','Thriller Inc.','1 Ghost Blvd.','','Santa Monica','CA','98990','USA','4431110000','',1287508123);
CREATE TABLE credit_cards ( label VARCHAR, unique_id INTEGER PRIMARY KEY, name_on_card VARCHAR, type VARCHAR, card_number VARCHAR, expiration_month INTEGER, expiration_year INTEGER, verification_code VARCHAR, billing_address VARCHAR, shipping_address VARCHAR, card_number_encrypted BLOB, verification_code_encrypted BLOB, date_modified INTEGER NOT NULL DEFAULT 0);
COMMIT;
"""
    parsed = DDLParser(ddl, normalize_names=True).run(group_by_type=True)

    def column(name, col_type, nullable=True, default=None):
        # Build one expected column entry; only the fields that vary
        # between columns are parameterized.
        return {
            "check": None,
            "default": default,
            "name": name,
            "nullable": nullable,
            "references": None,
            "size": None,
            "type": col_type,
            "unique": False,
        }

    expected = {
        "ddl_properties": [],
        "domains": [],
        "schemas": [],
        "sequences": [],
        "tables": [
            {
                "alter": {},
                "checks": [],
                "columns": [
                    column("label", "VARCHAR"),
                    # PRIMARY KEY column is implicitly NOT NULL
                    column("unique_id", "INTEGER", nullable=False),
                    column("name_on_card", "VARCHAR"),
                    column("type", "VARCHAR"),
                    column("card_number", "VARCHAR"),
                    column("expiration_month", "INTEGER"),
                    column("expiration_year", "INTEGER"),
                    column("verification_code", "VARCHAR"),
                    column("billing_address", "VARCHAR"),
                    column("shipping_address", "VARCHAR"),
                    column("card_number_encrypted", "BLOB"),
                    column("verification_code_encrypted", "BLOB"),
                    column("date_modified", "INTEGER", nullable=False, default=0),
                ],
                "index": [],
                "partitioned_by": [],
                "primary_key": ["unique_id"],
                "schema": None,
                "table_name": "credit_cards",
                "tablespace": None,
            }
        ],
        "types": [],
    }
    assert parsed == expected

0 comments on commit 762fea7

Please sign in to comment.