Debezium / DBZ-1205

Overflowed Timestamp in Postgres Connection

Details

      Create a Postgres database and insert a row:

      CREATE TABLE public."BugTest"
      (
        id SERIAL,
        "timeField" timestamp with time zone,
        "dummyField" character varying(255),
        CONSTRAINT id PRIMARY KEY (id)
      );
      INSERT INTO public."BugTest"(
                  id, "timeField", "dummyField")
          VALUES (1, '3016-01-01 06:59:59+00', 'dummy1');
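
      For reference, a minimal Kafka Connect registration for the Debezium Postgres connector looks roughly like the sketch below; the connector name, host and credentials are placeholders rather than values from this report, 'database.server.name' is inferred from the 'test.test' prefix of the emitted records, and 'time.precision.mode' is included only to show where the setting discussed in the description is configured:

      {
        "name": "bugtest-connector",
        "config": {
          "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
          "database.hostname": "localhost",
          "database.port": "5432",
          "database.user": "postgres",
          "database.password": "postgres",
          "database.dbname": "test",
          "database.server.name": "test.test",
          "time.precision.mode": "adaptive"
        }
      }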
      

      Start the connector. The read operation works fine with the timestamp:

      {
      	"before": null,
      	"after": {
      		"test.test.BugTest": {
      			"id": 1,
      			"timeField": {
      				"string": "3016-01-01T06:59:59Z"
      			},
      			"dummyField": {
      				"string": "dummy1"
      			}
      		}
      	},
      	"source": {
      		"version": {
      			"string": "0.9.0.Final"
      		},
      		"connector": {
      			"string": "postgresql"
      		},
      		"name": "test.test",
      		"db": "test",
      		"ts_usec": {
      			"long": 1553882596288000
      		},
      		"txId": {
      			"long": 1870
      		},
      		"lsn": {
      			"long": 34543208
      		},
      		"schema": {
      			"string": "public"
      		},
      		"table": {
      			"string": "BugTest"
      		},
      		"snapshot": {
      			"boolean": true
      		},
      		"last_snapshot_record": {
      			"boolean": false
      		}
      	},
      	"op": "r",
      	"ts_ms": {
      		"long": 1553882596288
      	}
      }
      

      Change something in that row:

      UPDATE public."BugTest"
         SET "dummyField"='newdummy'
       WHERE id = 1;
      

      The connector emits an incorrect timestamp:

      {
      	"before": {
      		"test.test.BugTest": {
      			"id": 1,
      			"timeField": null,
      			"dummyField": null
      		}
      	},
      	"after": {
      		"test.test.BugTest": {
      			"id": 1,
      			"timeField": {
      				"string": "1846-11-22T07:50:51.580896768Z"
      			},
      			"dummyField": {
      				"string": "newdummy"
      			}
      		}
      	},
      	"source": {
      		"version": {
      			"string": "0.9.0.Final"
      		},
      		"connector": {
      			"string": "postgresql"
      		},
      		"name": "test.test",
      		"db": "test",
      		"ts_usec": {
      			"long": 1553882894493567000
      		},
      		"txId": {
      			"long": 1871
      		},
      		"lsn": {
      			"long": 34600437
      		},
      		"schema": {
      			"string": "public"
      		},
      		"table": {
      			"string": "BugTest"
      		},
      		"snapshot": {
      			"boolean": false
      		},
      		"last_snapshot_record": null
      	},
      	"op": "u",
      	"ts_ms": {
      		"long": 1553882894621
      	}
      }
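
      Querying the table directly shows the stored value is intact (the snapshot read above also returned it correctly), so the wrong value is produced by the connector's conversion of the change event, not by Postgres:

      -- The stored timestamp is unchanged by the UPDATE above:
      SELECT "timeField" FROM public."BugTest" WHERE id = 1;
      -- returns: 3016-01-01 06:59:59+00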
      

    Description

      I am using Debezium to connect to a Postgres table that has a column of type "timestamp with time zone". I have noticed that if the date in that column is later than the year 2262, Debezium does not transform it into a string correctly, probably due to an overflow. Strangely enough, this does not happen for read operations after the connector is initialized, but it does for create, update and delete operations. For example, "3016-01-01 06:59:59+00" is translated into "1846-11-22T07:50:51.580896768Z". I have also tried changing 'time.precision.mode' to 'adaptive_time_microseconds' and to 'connect', but the problem persisted.
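
      The reported value is consistent with the timestamp being converted to nanoseconds since the Unix epoch in a signed 64-bit long, which cannot represent instants later than roughly 2262-04-11. The following standalone Java sketch (an illustration of the suspected mechanism, not Debezium's actual code) reproduces the exact value from the update event:

      import java.time.Instant;
      import java.time.OffsetDateTime;
      import java.time.ZoneOffset;

      public class TimestampOverflowDemo {
          public static void main(String[] args) {
              // '3016-01-01 06:59:59+00' expressed as microseconds since the Unix epoch
              Instant original = OffsetDateTime.of(3016, 1, 1, 6, 59, 59, 0, ZoneOffset.UTC).toInstant();
              long micros = original.getEpochSecond() * 1_000_000L + original.getNano() / 1_000L;

              // Converting to nanoseconds in a signed 64-bit long silently wraps around
              // for instants after ~2262-04-11 (Long.MAX_VALUE ns is ~292 years past 1970).
              long nanos = micros * 1_000L;

              Instant wrapped = Instant.ofEpochSecond(
                      Math.floorDiv(nanos, 1_000_000_000L),
                      Math.floorMod(nanos, 1_000_000_000L));

              System.out.println(original); // 3016-01-01T06:59:59Z
              System.out.println(wrapped);  // 1846-11-22T07:50:51.580896768Z
          }
      }

      The wrapped result differs from the true value by an exact multiple of 2^64 nanoseconds, and its fractional seconds (580896768) match the update event above, which supports the overflow explanation.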


    People

      Assignee: Unassigned
      Reporter: S Vakili (soheylv)