Fix handling huge int numbers via PG numeric, close #38
exAspArk committed Jan 30, 2025
1 parent d7907ee commit eaf9883
Showing 4 changed files with 65 additions and 2 deletions.
scripts/test-data-types.sql (6 changes: 5 additions & 1 deletion)
@@ -19,11 +19,12 @@ CREATE TABLE test_table (
   int2_column INT2,
   int4_column INT4,
   int8_column INT8,
+  hugeint_column NUMERIC(20, 0),
   xid_column XID,
   xid8_column XID8,
   float4_column FLOAT4,
   float8_column FLOAT8,
-  numeric_column NUMERIC(10, 2),
+  numeric_column NUMERIC(40, 2),
   date_column DATE,
   time_column TIME,
   time_ms_column TIME(3),
@@ -58,6 +59,7 @@ INSERT INTO test_table (
   int2_column,
   int4_column,
   int8_column,
+  hugeint_column,
   xid_column,
   xid8_column,
   float4_column,
@@ -95,6 +97,7 @@ INSERT INTO test_table (
   32767::INT2, -- int2_column
   2147483647::INT4, -- int4_column
   9223372036854775807::INT8, -- int8_column
+  10000000000000000000, -- hugeint_column
   '4294967295'::XID, -- xid_column
   '18446744073709551615'::XID8, -- xid8_column
   3.14::FLOAT4, -- float4_column
@@ -132,6 +135,7 @@ INSERT INTO test_table (
   -32767::INT2, -- int2_column
   NULL, -- int4_column
   -9223372036854775807::INT8, -- int8_column
+  NULL, -- hugeint_column
   NULL, -- xid_column
   NULL, -- xid8_column
   'NaN', -- float4_column
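
The new fixture value, 10000000000000000000, sits just above the signed 64-bit range, which is what the rest of the commit has to accommodate. A minimal Go sketch (not part of the change) showing why such a value cannot travel through int64 and needs an arbitrary-precision representation, hence the NUMERIC(20, 0) column:

```go
package main

import (
	"fmt"
	"math/big"
	"strconv"
)

func main() {
	const huge = "10000000000000000000" // the new hugeint_column fixture value

	// INT8 / int64 tops out at 9223372036854775807, so a plain parse fails.
	if _, err := strconv.ParseInt(huge, 10, 64); err != nil {
		fmt.Println("int64:", err) // value out of range
	}

	// math/big has no such limit, which is why the column is declared
	// NUMERIC(20, 0) on the PostgreSQL side instead of INT8.
	n, ok := new(big.Int).SetString(huge, 10)
	fmt.Println("big.Int:", n, ok) // 10000000000000000000 true
}
```
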
src/init_test.go (10 changes: 10 additions & 0 deletions)
@@ -62,6 +62,14 @@ var TEST_PG_SCHEMA_COLUMNS = []PgSchemaColumn{
 		NumericScale: "0",
 		Namespace: "pg_catalog",
 	},
+	{
+		ColumnName: "hugeint_column",
+		DataType: "numeric",
+		UdtName: "numeric",
+		NumericPrecision: "20", // Will be capped to 38
+		NumericScale: "0",
+		Namespace: "pg_catalog",
+	},
 	{
 		ColumnName: "xid_column",
 		DataType: "xid",
@@ -263,6 +271,7 @@ var TEST_LOADED_ROWS = [][]string{
 		"32767", // int2_column
 		"2147483647", // int4_column
 		"9223372036854775807", // int8_column
+		"10000000000000000000", // hugeint_column
 		"4294967295", // xid_column
 		"18446744073709551615", // xid8_column
 		"3.14", // float4_column
@@ -302,6 +311,7 @@ var TEST_LOADED_ROWS = [][]string{
 		"-32767", // int2_column
 		PG_NULL_STRING, // int4_column
 		"-9223372036854775807", // int8_column
+		PG_NULL_STRING, // hugeint_column
 		PG_NULL_STRING, // xid_column
 		PG_NULL_STRING, // xid8_column
 		"NaN", // float4_column
src/query_handler.go (41 changes: 40 additions & 1 deletion)
@@ -8,6 +8,7 @@ import (
 	"encoding/csv"
 	"errors"
 	"fmt"
+	"math/big"
 	"strconv"
 	"strings"

@@ -118,6 +119,31 @@ func (nullUint64 NullUint64) String() string {

 ////////////////////////////////////////////////////////////////////////////////////////////////////

+type NullBigInt struct {
+	Present bool
+	Value *big.Int
+}
+
+func (nullBigInt *NullBigInt) Scan(value interface{}) error {
+	if value == nil {
+		nullBigInt.Present = false
+		return nil
+	}
+
+	nullBigInt.Present = true
+	nullBigInt.Value = value.(*big.Int)
+	return nil
+}
+
+func (nullBigInt NullBigInt) String() string {
+	if nullBigInt.Present {
+		return fmt.Sprintf("%v", nullBigInt.Value)
+	}
+	return ""
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
 type NullArray struct {
 	Present bool
 	Value []interface{}
@@ -474,6 +500,10 @@ func (queryHandler *QueryHandler) columnTypeOid(col *sql.ColumnType) uint32 {
 		return pgtype.XID8OID
 	case "UBIGINT[]":
 		return pgtype.XID8ArrayOID
+	case "HUGEINT":
+		return pgtype.NumericOID
+	case "HUGEINT[]":
+		return pgtype.NumericArrayOID
 	case "FLOAT":
 		return pgtype.Float4OID
 	case "FLOAT[]":
@@ -540,7 +570,7 @@ func (queryHandler *QueryHandler) generateDataRow(rows *sql.Rows, cols []*sql.Co
 	case "int32":
 		var value sql.NullInt32
 		valuePtrs[i] = &value
-	case "int64", "*big.Int":
+	case "int64":
 		var value sql.NullInt64
 		valuePtrs[i] = &value
 	case "uint32": // xid
@@ -561,6 +591,9 @@ func (queryHandler *QueryHandler) generateDataRow(rows *sql.Rows, cols []*sql.Co
 	case "time.Time":
 		var value sql.NullTime
 		valuePtrs[i] = &value
+	case "*big.Int":
+		var value NullBigInt
+		valuePtrs[i] = &value
 	case "duckdb.Decimal":
 		var value NullDecimal
 		valuePtrs[i] = &value
@@ -643,6 +676,12 @@ func (queryHandler *QueryHandler) generateDataRow(rows *sql.Rows, cols []*sql.Co
 		} else {
 			values = append(values, nil)
 		}
+	case *NullBigInt:
+		if value.Present {
+			values = append(values, []byte(value.String()))
+		} else {
+			values = append(values, nil)
+		}
 	case *NullDecimal:
 		if value.Present {
 			values = append(values, []byte(value.String()))
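
Taken together, the query_handler.go changes route DuckDB HUGEINT results through the new NullBigInt scanner and announce them to clients as PostgreSQL numeric (pgtype.NumericOID). A self-contained sketch of that scan-then-stringify round trip, mirroring the type added above; the sample value and the standalone main are illustrative only, not code from the repository:

```go
package main

import (
	"fmt"
	"math/big"
)

// NullBigInt mirrors the wrapper added in query_handler.go: it scans a
// possibly-nil *big.Int and renders it as text for the wire protocol.
type NullBigInt struct {
	Present bool
	Value   *big.Int
}

func (n *NullBigInt) Scan(value interface{}) error {
	if value == nil {
		n.Present = false
		return nil
	}
	n.Present = true
	n.Value = value.(*big.Int)
	return nil
}

func (n NullBigInt) String() string {
	if n.Present {
		return fmt.Sprintf("%v", n.Value)
	}
	return ""
}

func main() {
	// A value just above the int64 maximum, as the driver might hand it back.
	v, _ := new(big.Int).SetString("10000000000000000000", 10)

	var present NullBigInt
	_ = present.Scan(v)
	fmt.Println(present.String()) // 10000000000000000000

	var missing NullBigInt
	_ = missing.Scan(nil)
	fmt.Printf("%q\n", missing.String()) // ""
}
```
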
src/query_handler_test.go (10 changes: 10 additions & 0 deletions)
@@ -352,6 +352,16 @@ func TestHandleQuery(t *testing.T) {
 		"types": {Uint32ToString(pgtype.Int8OID)},
 		"values": {"-9223372036854775807"},
 	},
+	"SELECT hugeint_column FROM public.test_table WHERE hugeint_column IS NOT NULL": {
+		"description": {"hugeint_column"},
+		"types": {Uint32ToString(pgtype.NumericOID)},
+		"values": {"1e+19"},
+	},
+	"SELECT hugeint_column FROM public.test_table WHERE hugeint_column IS NULL": {
+		"description": {"hugeint_column"},
+		"types": {Uint32ToString(pgtype.NumericOID)},
+		"values": {""},
+	},
 	"SELECT xid_column FROM public.test_table WHERE xid_column IS NOT NULL": {
 		"description": {"xid_column"},
 		"types": {Uint32ToString(pgtype.XIDOID)},
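
One detail worth flagging in the expectations above: the non-NULL row comes back as "1e+19" rather than the full twenty digits. The diff itself does not say why; one plausible reading (an assumption, not something stated in this commit) is that the value reaches the formatter as a float64 on this query path, since Go's default float formatting produces exactly that string, while big.Int formatting would keep every digit:

```go
package main

import (
	"fmt"
	"math/big"
)

func main() {
	// Hypothetical comparison, not code from the repository.
	asFloat := float64(10000000000000000000) // exact in float64: 10^19 = 2^19 * 5^19
	fmt.Println(asFloat)                     // 1e+19  <- matches the test expectation

	asBig, _ := new(big.Int).SetString("10000000000000000000", 10)
	fmt.Println(asBig) // 10000000000000000000
}
```
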
