Merge pull request #582 from YAtOff/fix/parse-default-column-charset
Fix an issue in parsing default column charset
sean-k1 authored Dec 1, 2023
2 parents 82240b7 + 4b31b5b commit bedcad7
Showing 2 changed files with 48 additions and 7 deletions.
9 changes: 6 additions & 3 deletions pymysqlreplication/row_event.py
@@ -1005,14 +1005,17 @@ def _parsed_column_charset_by_default_charset(
         column_type_detect_function,
     ):
         column_charset = []
+        position = 0
         for i in range(self.column_count):
             column_type = self.columns[i].type
             if not column_type_detect_function(column_type, dbms=self.dbms):
                 continue
-            elif i not in column_charset_collation.keys():
-                column_charset.append(default_charset_collation)
-            else:
-                column_charset.append(column_charset_collation[i])
+            if position not in column_charset_collation.keys():
+                column_charset.append(default_charset_collation)
+            else:
+                column_charset.append(column_charset_collation[position])
+            position += 1
 
         return column_charset

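To see why the lookup key matters: as the fix above implies, the per-column collation overrides read from the DEFAULT_CHARSET optional metadata are keyed by the column's position among character-typed columns, not by its table-wide index, so the old code broke whenever non-character columns preceded character ones. Below is a minimal sketch of the corrected logic; the function name, sample data, and collation ids are illustrative, not the library's API.

# Minimal sketch of the corrected lookup; names and data are hypothetical.
def collect_column_charsets(column_types, is_character_type, default_collation, overrides):
    """Return one collation per character column, in table order."""
    charsets = []
    position = 0  # index among character columns only, as in the fix above
    for column_type in column_types:
        if not is_character_type(column_type):
            continue
        # The override map is keyed by `position`, not by the table-wide index.
        charsets.append(overrides.get(position, default_collation))
        position += 1
    return charsets

# Hypothetical table mirroring the column layout of the new test below:
# column 0 = INT, 1 = VARCHAR, 2 = VARCHAR, 3 = LONGBLOB.
types = ["int", "varchar", "varchar", "longblob"]
overrides = {2: 63}  # third *character* column (the LONGBLOB) uses collation 63 = binary
print(collect_column_charsets(types, lambda t: t != "int", 33, overrides))  # 33 = utf8_general_ci
# -> [33, 33, 63]

With the old index-based lookup, the LONGBLOB (table index 3) would have missed its override and the binary collation would instead have been applied to the second VARCHAR, whose table index happens to be 2.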
46 changes: 42 additions & 4 deletions pymysqlreplication/tests/test_basic.py
@@ -420,6 +420,44 @@ def test_minimal_image_update_row_event(self):
         self.assertEqual(event.rows[0]["after_values"]["id"], None)
         self.assertEqual(event.rows[0]["after_values"]["data"], "World")
 
+    def test_default_charset_parsing(self):
+        """
+        Here, we want the database to include the binary charset in
+        the DEFAULT_CHARSET optional metadata block.
+        We also add an int field and two text fields to force a
+        difference between the index of the blob column in the table
+        and its index in the list of columns that have a charset.
+        """
+        query = """CREATE TABLE test (
+            id INT NOT NULL AUTO_INCREMENT,
+            text1 VARCHAR(255) NOT NULL,
+            text2 VARCHAR(255) NOT NULL,
+            data LONGBLOB NOT NULL,
+            PRIMARY KEY (id)
+        ) DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;"""
+        self.execute(query)
+        query = "INSERT INTO test (text1, text2, data) VALUES(%s, %s, %s)"
+        self.execute_with_args(query, ("text", "text", b"data"))
+        self.execute("COMMIT")
+
+        self.assertIsInstance(self.stream.fetchone(), RotateEvent)
+        self.assertIsInstance(self.stream.fetchone(), FormatDescriptionEvent)
+        # QueryEvent for the Create Table
+        self.assertIsInstance(self.stream.fetchone(), QueryEvent)
+        # QueryEvent for the BEGIN
+        self.assertIsInstance(self.stream.fetchone(), QueryEvent)
+
+        event = self.stream.fetchone()
+        self.assertIsInstance(event, TableMapEvent)
+        if event.table_map[event.table_id].column_name_flag:
+            columns = {c.name: c for c in event.columns}
+            assert columns["text1"].character_set_name == "utf8"
+            assert columns["text1"].collation_name.startswith("utf8")
+            assert columns["text2"].character_set_name == "utf8"
+            assert columns["text2"].collation_name.startswith("utf8")
+            assert columns["data"].character_set_name == "binary"
+            assert columns["data"].collation_name == "binary"
+
     def test_log_pos(self):
         query = "CREATE TABLE test (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
         self.execute(query)
@@ -1859,7 +1897,7 @@ def setUp(self):
     def test_json_partial_update(self):
         create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
         column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
-        insert_query = """INSERT INTO test_json_v2 VALUES
+        insert_query = """INSERT INTO test_json_v2 VALUES
        (101
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
@@ -1896,7 +1934,7 @@ def test_json_partial_update_column_value_none(self):
         drop_table_if_exists_query = "DROP TABLE IF EXISTS test_json_v2;"
         create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
         column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
-        insert_query = """INSERT INTO test_json_v2 VALUES
+        insert_query = """INSERT INTO test_json_v2 VALUES
        (101
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
@@ -1934,7 +1972,7 @@ def test_json_partial_update_json_remove(self):
         drop_table_if_exists_query = "DROP TABLE IF EXISTS test_json_v2;"
         create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
         column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
-        insert_query = """INSERT INTO test_json_v2 VALUES
+        insert_query = """INSERT INTO test_json_v2 VALUES
        (101
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
@@ -1974,7 +2012,7 @@ def test_json_partial_update_two_column(self):
         drop_table_if_exists_query = "DROP TABLE IF EXISTS test_json_v2;"
         create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
         column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
-        insert_query = """INSERT INTO test_json_v2 VALUES
+        insert_query = """INSERT INTO test_json_v2 VALUES
        (101
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
        ,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
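For context, a hedged usage sketch of how the parsed charset metadata surfaces to a consumer of the stream. The connection settings and server_id are illustrative assumptions, and column names in TableMapEvent are only present when the server runs with binlog_row_metadata=FULL; none of this is part of the commit itself.

# Illustrative only: connection settings, server_id, and the server's binlog
# configuration are assumptions, not part of this commit.
from pymysqlreplication import BinLogStreamReader
from pymysqlreplication.row_event import TableMapEvent

MYSQL_SETTINGS = {"host": "127.0.0.1", "port": 3306, "user": "root", "passwd": ""}

stream = BinLogStreamReader(
    connection_settings=MYSQL_SETTINGS,
    server_id=100,                 # must be unique among replicas
    only_events=[TableMapEvent],
    blocking=False,
)

for event in stream:
    # column_name_flag is set when the TABLE_MAP optional metadata carries
    # column names (server option binlog_row_metadata=FULL).
    if event.table_map[event.table_id].column_name_flag:
        for column in event.columns:
            print(column.name, column.character_set_name, column.collation_name)

stream.close()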
