@@ -371,6 +371,248 @@ class CheckConstraintSuite extends QueryTest with CommandSuiteBase with DDLComma
    }
  }

+ test(" Check constraint with current_timestamp function" ) {
375
+ withNamespaceAndTable(" ns" , " tbl" , nonPartitionCatalog) { t =>
376
+ // Create table with a constraint using current_timestamp
377
+ sql(s " CREATE TABLE $t (id INT, creation_time TIMESTAMP, " +
378
+ s " CONSTRAINT valid_time CHECK (creation_time <= current_timestamp())) $defaultUsing" )
379
+
380
+ // Insert valid data (current or past timestamp)
381
+ sql(s " INSERT INTO $t VALUES (1, current_timestamp()), (2, TIMESTAMP '2020-01-01 00:00:00') " )
382
+ checkAnswer(spark.table(t).select(" id" ), Seq (Row (1 ), Row (2 )))
383
+
384
+ // Insert valid data with null timestamp
385
+ sql(s " INSERT INTO $t VALUES (3, null) " )
386
+ checkAnswer(spark.table(t).select(" id" ), Seq (Row (1 ), Row (2 ), Row (3 )))
387
+
388
+ // Future timestamp should fail validation
389
+ val tomorrow = " current_timestamp() + INTERVAL 1 DAY"
390
+ val error = intercept[SparkRuntimeException ] {
391
+ sql(s " INSERT INTO $t VALUES (4, $tomorrow) " )
392
+ }
393
+ assert(error.getMessage.contains(" CHECK_CONSTRAINT_VIOLATION" ))
394
+ assert(error.getMessageParameters.get(" constraintName" ) == " valid_time" )
395
+ assert(error.getMessageParameters.get(" expression" ) == " creation_time <= current_timestamp()" )
396
+ }
397
+ }
398
+
+  test("Check constraint with current_timestamp function - update operation") {
+    withNamespaceAndTable("ns", "tbl", rowLevelOPCatalog) { t =>
+      // Create table with a constraint using current_timestamp
+      sql(s"CREATE TABLE $t (id INT, creation_time TIMESTAMP, " +
+        s"CONSTRAINT valid_time CHECK (creation_time <= current_timestamp())) $defaultUsing")
+
+      // Insert initial data
+      sql(s"INSERT INTO $t VALUES (1, current_timestamp()), (2, TIMESTAMP '2020-01-01 00:00:00')")
+
+      // Valid update with current or past timestamp
+      sql(s"UPDATE $t SET creation_time = TIMESTAMP '2021-01-01 00:00:00' WHERE id = 1")
+      checkAnswer(spark.table(t).select("id")
+        .where("creation_time = TIMESTAMP '2021-01-01 00:00:00'"),
+        Seq(Row(1)))
+
+      // Valid update with null timestamp
+      sql(s"UPDATE $t SET creation_time = null WHERE id = 2")
+      checkAnswer(sql(s"SELECT * FROM $t WHERE id = 2"), Seq(Row(2, null)))
+
+      // Future timestamp should fail validation
+      val tomorrow = "current_timestamp() + INTERVAL 1 DAY"
+      val error = intercept[SparkRuntimeException] {
+        sql(s"UPDATE $t SET creation_time = $tomorrow WHERE id = 1")
+      }
+      assert(error.getMessage.contains("CHECK_CONSTRAINT_VIOLATION"))
+      assert(error.getMessageParameters.get("constraintName") == "valid_time")
+      assert(error.getMessageParameters.get("expression") == "creation_time <= current_timestamp()")
+    }
+  }
+
+  test("Check constraint with current_timestamp function - merge operation") {
+    withNamespaceAndTable("ns", "tbl", rowLevelOPCatalog) { target =>
+      withNamespaceAndTable("ns", "source", rowLevelOPCatalog) { source =>
+        // Create target table with constraint using current_timestamp
+        sql(s"CREATE TABLE $target (id INT, creation_time TIMESTAMP, " +
+          s"CONSTRAINT valid_time CHECK (creation_time <= current_timestamp())) $defaultUsing")
+
+        // Create source table without constraints
+        sql(s"CREATE TABLE $source (id INT, creation_time TIMESTAMP) $defaultUsing")
+
+        // Insert initial data
+        sql(s"INSERT INTO $target VALUES (1, TIMESTAMP '2020-01-01 00:00:00')")
+        sql(s"INSERT INTO $source VALUES " +
+          s"(2, TIMESTAMP '2021-01-01 00:00:00'), " +
+          s"(3, current_timestamp()), " +
+          s"(4, null)")
+
+        // Valid merge with past timestamps or null
+        sql(
+          s"""
+             |MERGE INTO $target t
+             |USING (SELECT * FROM $source WHERE id IN (2, 4)) s
+             |ON t.id = s.id
+             |WHEN MATCHED THEN UPDATE SET creation_time = s.creation_time
+             |WHEN NOT MATCHED THEN INSERT (id, creation_time) VALUES (s.id, s.creation_time)
+             |""".stripMargin)
+
+        checkAnswer(spark.table(target).orderBy("id"),
+          Seq(Row(1, java.sql.Timestamp.valueOf("2020-01-01 00:00:00")),
+            Row(2, java.sql.Timestamp.valueOf("2021-01-01 00:00:00")),
+            Row(4, null)))
+
+        // Future timestamp should fail validation
+        val tomorrow = "current_timestamp() + INTERVAL 1 DAY"
+        sql(s"INSERT INTO $source VALUES (5, $tomorrow)")
+
+        val error = intercept[SparkRuntimeException] {
+          sql(
+            s"""
+               |MERGE INTO $target t
+               |USING (SELECT * FROM $source WHERE id = 5) s
+               |ON t.id = s.id
+               |WHEN NOT MATCHED THEN INSERT (id, creation_time) VALUES (s.id, s.creation_time)
+               |""".stripMargin)
+        }
+
+        assert(error.getMessage.contains("CHECK_CONSTRAINT_VIOLATION"))
+        assert(error.getMessageParameters.get("constraintName") == "valid_time")
+        assert(error.getMessageParameters.get("expression") ==
+          "creation_time <= current_timestamp()")
+      }
+    }
+  }
+
+  test("Check constraint with current_date function") {
+    withNamespaceAndTable("ns", "tbl", nonPartitionCatalog) { t =>
+      // Create table with a constraint using current_date
+      sql(s"CREATE TABLE $t (id INT, creation_date DATE, " +
+        s"CONSTRAINT valid_date CHECK (creation_date <= current_date())) $defaultUsing")
+
+      // Insert valid data (current or past date)
+      sql(s"INSERT INTO $t VALUES (1, current_date()), (2, DATE'2020-01-01')")
+      checkAnswer(spark.table(t).select("id"), Seq(Row(1), Row(2)))
+
+      // Future date should fail validation
+      val futureDate = "DATE'9999-12-31'"
+      val error = intercept[SparkRuntimeException] {
+        sql(s"INSERT INTO $t VALUES (3, $futureDate)")
+      }
+      checkError(
+        exception = error,
+        condition = "CHECK_CONSTRAINT_VIOLATION",
+        sqlState = "23001",
+        parameters = Map(
+          "constraintName" -> "valid_date",
+          "expression" -> "creation_date <= current_date()",
+          "values" -> "- creation_date : 2932896"
+        )
+      )
+    }
+  }
+
+  test("Check constraint with current_database function") {
+    withNamespaceAndTable("test_db", "tbl", nonPartitionCatalog) { t =>
+      sql(s"USE $nonPartitionCatalog.test_db")
+      sql(s"CREATE TABLE $t (id INT, db STRING, " +
+        s"CONSTRAINT valid_db CHECK (db = current_database())) $defaultUsing")
+
+      // Insert valid data (current database)
+      sql(s"INSERT INTO $t VALUES (1, current_database()), (2, 'test_db')")
+      checkAnswer(spark.table(t).select("id"), Seq(Row(1), Row(2)))
+
+      // Invalid database should fail validation
+      val error = intercept[SparkRuntimeException] {
+        sql(s"INSERT INTO $t VALUES (3, 'invalid_db')")
+      }
+      checkError(
+        exception = error,
+        condition = "CHECK_CONSTRAINT_VIOLATION",
+        sqlState = "23001",
+        parameters = Map(
+          "constraintName" -> "valid_db",
+          "expression" -> "db = current_database()",
+          "values" -> "- db : invalid_db"
+        )
+      )
+    }
+  }
+
+  test("Check constraint with current_database function - update operation") {
+    withNamespaceAndTable("test_db", "tbl", rowLevelOPCatalog) { t =>
+      sql(s"USE $rowLevelOPCatalog.test_db")
+      sql(s"CREATE TABLE $t (id INT, db STRING, " +
+        s"CONSTRAINT valid_db CHECK (db = current_database())) $defaultUsing")
+
+      // Insert initial valid data
+      sql(s"INSERT INTO $t VALUES (1, current_database()), (2, 'test_db')")
+      checkAnswer(spark.table(t).select("id"), Seq(Row(1), Row(2)))
+
+      // Valid update with current database value
+      sql(s"UPDATE $t SET db = 'test_db' WHERE id = 1")
+      checkAnswer(spark.table(t).where("id = 1"), Seq(Row(1, "test_db")))
+
+      // Invalid database should fail validation
+      val error = intercept[SparkRuntimeException] {
+        sql(s"UPDATE $t SET db = 'invalid_db' WHERE id = 2")
+      }
+      checkError(
+        exception = error,
+        condition = "CHECK_CONSTRAINT_VIOLATION",
+        sqlState = "23001",
+        parameters = Map(
+          "constraintName" -> "valid_db",
+          "expression" -> "db = current_database()",
+          "values" -> "- db : invalid_db"
+        )
+      )
+    }
+  }
+
+  test("Check constraint with current_database function - merge operation") {
+    withNamespaceAndTable("test_db", "target", rowLevelOPCatalog) { target =>
+      withNamespaceAndTable("test_db", "source", rowLevelOPCatalog) { source =>
+        sql(s"USE $rowLevelOPCatalog.test_db")
+        sql(s"CREATE TABLE $target (id INT, db STRING, " +
+          s"CONSTRAINT valid_db CHECK (db = current_database())) $defaultUsing")
+        sql(s"CREATE TABLE $source (id INT, db STRING) $defaultUsing")
+
+        // Insert initial valid data
+        sql(s"INSERT INTO $target VALUES (1, current_database()), (2, 'test_db')")
+        sql(s"INSERT INTO $source VALUES (3, 'test_db'), (4, 'invalid_db')")
+
+        // Valid merge with current database value
+        sql(
+          s"""
+             |MERGE INTO $target t
+             |USING (SELECT * FROM $source WHERE id = 3) s
+             |ON t.id = s.id
+             |WHEN NOT MATCHED THEN INSERT (id, db) VALUES (s.id, s.db)
+             |""".stripMargin)
+        checkAnswer(spark.table(target).orderBy("id"),
+          Seq(Row(1, "test_db"), Row(2, "test_db"), Row(3, "test_db")))
+
+        // Invalid database should fail validation
+        val error = intercept[SparkRuntimeException] {
+          sql(
+            s"""
+               |MERGE INTO $target t
+               |USING (SELECT * FROM $source WHERE id = 4) s
+               |ON t.id = s.id
+               |WHEN NOT MATCHED THEN INSERT (id, db) VALUES (s.id, s.db)
+               |""".stripMargin)
+        }
+        checkError(
+          exception = error,
+          condition = "CHECK_CONSTRAINT_VIOLATION",
+          sqlState = "23001",
+          parameters = Map(
+            "constraintName" -> "valid_db",
+            "expression" -> "db = current_database()",
+            "values" -> "- db : invalid_db"
+          )
+        )
+      }
+    }
+  }
+
  test("Check constraint violation on table insert - nested column") {
    withNamespaceAndTable("ns", "tbl", nonPartitionCatalog) { t =>
      sql(s"CREATE TABLE $t (id INT, " +