# 2001 September 23
#
# The author disclaims copyright to this source code.  In place of
# a legal notice, here is a blessing:
#
#    May you do good and not evil.
#    May you find forgiveness for yourself and forgive others.
#    May you share freely, never taking more than you give.
#
#***********************************************************************
# This file implements regression tests for the SQLite library.  The
# focus of this file is stressing the library by putting large amounts
# of data in a single row of a table.
#
# $Id: bigrow.test,v 1.4 2001/11/24 00:31:47 drh Exp $

set testdir [file dirname $argv0]
source $testdir/tester.tcl
# Make a big string that we can use for test data
#
do_test bigrow-1.0 {
  set ::bigstr {}
  for {set i 1} {$i<=9999} {incr i} {
    set sep [string index "abcdefghijklmnopqrstuvwxyz" [expr {$i%26}]]
    append ::bigstr "$sep [format %04d $i] "
  }
  string length $::bigstr
} {69993}
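# Each pass through the loop above appends a one-character separator, a
# space, a four-digit number, and a trailing space: 7 characters per
# iteration, so 7*9999 = 69993 characters total, which is what the
# expected result checks.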
# Make a table into which we can insert some big records.
#
do_test bigrow-1.1 {
  execsql {
    CREATE TABLE t1(a text, b text, c text);
    SELECT name FROM sqlite_master
      WHERE type='table' OR type='index'
      ORDER BY name
  }
} {t1}
do_test bigrow-1.2 {
  set ::big1 [string range $::bigstr 0 65519]
  set sql "INSERT INTO t1 VALUES('abc',"
  append sql "'$::big1', 'xyz');"
  execsql $sql
  execsql {SELECT a, c FROM t1}
} {abc xyz}
do_test bigrow-1.3 {
  execsql {SELECT b FROM t1}
} [list $::big1]
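# $::big1 is 65520 characters long (indices 0 through 65519 of $::bigstr),
# and $::big2 in the next test is one character longer.  These sizes were
# presumably chosen to sit on either side of the old 64K row-size limit
# mentioned in the comment before bigrow-3.1.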
do_test bigrow-1.4 {
  set ::big2 [string range $::bigstr 0 65520]
  set sql "INSERT INTO t1 VALUES('abc2',"
  append sql "'$::big2', 'xyz2');"
  set r [catch {execsql $sql} msg]
  lappend r $msg
} {0 {}}
do_test bigrow-1.4.1 {
  execsql {SELECT b FROM t1 ORDER BY c}
} [list $::big1 $::big2]
do_test bigrow-1.4.2 {
  execsql {SELECT c FROM t1 ORDER BY c}
} {xyz xyz2}
do_test bigrow-1.4.3 {
  execsql {DELETE FROM t1 WHERE a='abc2'}
  execsql {SELECT c FROM t1}
} {xyz}
do_test bigrow-1.5 {
  execsql {
    UPDATE t1 SET a=b, b=a;
    SELECT b,c FROM t1
  }
} {abc xyz}
do_test bigrow-1.6 {
  execsql {
    SELECT * FROM t1
  }
} [list $::big1 abc xyz]
do_test bigrow-1.7 {
  execsql {
    INSERT INTO t1 VALUES('1','2','3');
    INSERT INTO t1 VALUES('A','B','C');
    SELECT b FROM t1 WHERE a=='1';
  }
} {2}
do_test bigrow-1.8 {
  execsql "SELECT b FROM t1 WHERE a=='$::big1'"
} {abc}
do_test bigrow-1.9 {
  execsql "SELECT b FROM t1 WHERE a!='$::big1' ORDER BY a"
} {2 B}
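# Note that bigrow-1.8 and bigrow-1.9 embed the 65520-character value
# directly in the SQL text, so the statement string itself is well over
# 64KB.  This exercises big values both as stored column data and as
# literals passed through the SQL parser.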
# Try doing some indexing on big columns
#
do_test bigrow-2.1 {
  execsql {
    CREATE INDEX i1 ON t1(a)
  }
  execsql "SELECT b FROM t1 WHERE a=='$::big1'"
} {abc}
do_test bigrow-2.2 {
  execsql {
    UPDATE t1 SET a=b, b=a
  }
  execsql "SELECT b FROM t1 WHERE a=='abc'"
} [list $::big1]
do_test bigrow-2.3 {
  execsql {
    UPDATE t1 SET a=b, b=a
  }
  execsql "SELECT b FROM t1 WHERE a=='$::big1'"
} {abc}
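# By this point index i1 has been maintained through two updates that
# swap the 65520-character value in and out of the indexed column, so
# the index keys themselves are as large as the row data.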
catch {unset ::bigstr}
catch {unset ::big1}
catch {unset ::big2}
# Most of the tests above were created back when rows were limited in
# size to 64K.  Now rows can be much bigger.  Test that logic.  Also
# make sure things work correctly at the transition boundaries between
# row sizes of 256 to 257 bytes and from 65536 to 65537 bytes.
#
# We begin by testing the 256..257 transition.
#
do_test bigrow-3.1 {
  execsql {
    DELETE FROM t1;
    INSERT INTO t1(a,b,c) VALUES('one','abcdefghijklmnopqrstuvwxyz0123','hi');
  }
  execsql {SELECT a,length(b),c FROM t1}
} {one 30 hi}
do_test bigrow-3.2 {
  execsql {
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
  }
  execsql {SELECT a,length(b),c FROM t1}
} {one 240 hi}
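# Three doublings of the 30-character seed string give 30*2^3 = 240
# characters, just below the 256-byte region of interest.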
for {set i 1} {$i<10} {incr i} {
  do_test bigrow-3.3.$i {
    execsql "UPDATE t1 SET b=b||'$i'"
    execsql {SELECT a,length(b),c FROM t1}
  } "one [expr {240+$i}] hi"
}
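# The loop above grows b one character at a time, from 241 up to 249
# bytes.  Together with the 'one' and 'hi' columns and per-record
# overhead, the total row size presumably crosses the 256..257 byte
# boundary somewhere in this range.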
# Now test the 65536..65537 row-size transition.
#
do_test bigrow-4.1 {
  execsql {
    DELETE FROM t1;
    INSERT INTO t1(a,b,c) VALUES('one','abcdefghijklmnopqrstuvwxyz0123','hi');
  }
  execsql {SELECT a,length(b),c FROM t1}
} {one 30 hi}
do_test bigrow-4.2 {
  execsql {
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
    UPDATE t1 SET b=b||b;
  }
  execsql {SELECT a,length(b),c FROM t1}
} {one 122880 hi}
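# Twelve doublings of the 30-character seed give 30*2^12 = 122880
# characters, comfortably past the 65536-byte mark.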
do_test bigrow-4.3 {
  execsql {
    UPDATE t1 SET b=substr(b,1,65515)
  }
  execsql {SELECT a,length(b),c FROM t1}
} {one 65515 hi}
for {set i 1} {$i<10} {incr i} {
  do_test bigrow-4.4.$i {
    execsql "UPDATE t1 SET b=b||'$i'"
    execsql {SELECT a,length(b),c FROM t1}
  } "one [expr {65515+$i}] hi"
}
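# bigrow-4.3 trims b back to 65515 bytes, and the loop then extends it
# one byte at a time up to 65524 bytes.  As with the 3.3 tests, the
# other two columns and per-record overhead presumably push the total
# row size across the 65536..65537 boundary during this loop.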
# Check to make sure the library recovers safely if a row contains
# too much data.
#
do_test bigrow-5.1 {
  execsql {
    DELETE FROM t1;
    INSERT INTO t1(a,b,c) VALUES('one','abcdefghijklmnopqrstuvwxyz0123','hi');
  }
  execsql {SELECT a,length(b),c FROM t1}
} {one 30 hi}
set i 1
for {set sz 60} {$sz<1048560} {incr sz $sz} {
  do_test bigrow-5.2.$i {
    execsql {
      UPDATE t1 SET b=b||b;
      SELECT a,length(b),c FROM t1;
    }
  } "one $sz hi"
  incr i
}
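# The loop doubles b on each pass while $sz tracks the expected length:
# 60, 120, ..., up to 983040 bytes (60*2^14), the last value below the
# 1048560 cutoff.  The next doubling, attempted in bigrow-5.3, should
# push the row past the maximum allowed size and fail cleanly with an
# error rather than corrupting the database.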
do_test bigrow-5.3 {
  set r [catch {execsql {UPDATE t1 SET b=b||b}} msg]
  lappend r $msg
} {1 {too much data for one table row}}
do_test bigrow-5.4 {
  execsql {DROP TABLE t1}
} {}

finish_test