Check-in [42c03851]

Overview
Comment: Get the -DSQLITE_OMIT_XFER_OPT option working. Run speed tests on a full regression. Add the script for generating sqlite3.c. (CVS 3723)
SHA1: 42c038518c4ba0ef827a5717d450f95165b3c729
User & Date: drh 2007-03-27 12:04:05
Context
2007-03-27 12:04  Get the -DSQLITE_OMIT_XFER_OPT option working. Run speed tests on a full regression. Add the script for generating sqlite3.c. (CVS 3724)  check-in: 1dd9d077  user: drh  tags: trunk
2007-03-27 12:04  Get the -DSQLITE_OMIT_XFER_OPT option working. Run speed tests on a full regression. Add the script for generating sqlite3.c. (CVS 3723)  check-in: 42c03851  user: drh  tags: trunk
2007-03-26 22:05  Modify sources so that they can be combined into a single sqlite3.c source file. Eliminate all type-pruned pointer warnings. (CVS 3722)  check-in: 0b832e21  user: drh  tags: trunk
Changes

Changes to src/insert.c.

     8      8   **    May you find forgiveness for yourself and forgive others.
     9      9   **    May you share freely, never taking more than you give.
    10     10   **
    11     11   *************************************************************************
    12     12   ** This file contains C code routines that are called by the parser
    13     13   ** to handle INSERT statements in SQLite.
    14     14   **
    15         -** $Id: insert.c,v 1.176 2007/02/24 15:18:50 drh Exp $
           15  +** $Id: insert.c,v 1.177 2007/03/27 12:04:05 drh Exp $
    16     16   */
    17     17   #include "sqliteInt.h"
    18     18   
    19     19   /*
    20     20   ** Set P3 of the most recently inserted opcode to a column affinity
    21     21   ** string for index pIdx. A column affinity string has one character
    22     22   ** for each column in the table, according to the affinity of the column:
................................................................................
  1241   1241       VdbeComment((v, "# %s", pIdx->zName));
  1242   1242       sqlite3VdbeOp3(v, op, i+base, pIdx->tnum, (char*)pKey, P3_KEYINFO_HANDOFF);
  1243   1243     }
  1244   1244     if( pParse->nTab<=base+i ){
  1245   1245       pParse->nTab = base+i;
  1246   1246     }
  1247   1247   }
         1248  +
         1249  +
         1250  +#ifdef SQLITE_TEST
         1251  +/*
         1252  +** The following global variable is incremented whenever the
         1253  +** transfer optimization is used.  This is used for testing
         1254  +** purposes only - to make sure the transfer optimization really
         1255  +** is happening when it is suppose to.
         1256  +*/
         1257  +int sqlite3_xferopt_count;
         1258  +#endif /* SQLITE_TEST */
         1259  +
  1248   1260   
  1249   1261   #ifndef SQLITE_OMIT_XFER_OPT
  1250   1262   /*
  1251   1263   ** Check to collation names to see if they are compatible.
  1252   1264   */
  1253   1265   static int xferCompatibleCollation(const char *z1, const char *z2){
  1254   1266     if( z1==0 ){
................................................................................
  1293   1305       }
  1294   1306     }
  1295   1307   
  1296   1308     /* If no test above fails then the indices must be compatible */
  1297   1309     return 1;
  1298   1310   }
  1299   1311   
  1300         -#ifdef SQLITE_TEST
  1301         -/*
  1302         -** The following global variable is incremented whenever the
  1303         -** transfer optimization is used.  This is used for testing
  1304         -** purposes only - to make sure the transfer optimization really
  1305         -** is happening when it is suppose to.
  1306         -*/
  1307         -int sqlite3_xferopt_count;
  1308         -#endif /* SQLITE_TEST */
  1309         -
  1310   1312   /*
  1311   1313   ** Attempt the transfer optimization on INSERTs of the form
  1312   1314   **
  1313   1315   **     INSERT INTO tab1 SELECT * FROM tab2;
  1314   1316   **
  1315   1317   ** This optimization is only attempted if
  1316   1318   **
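
The hunk above relocates the test-only sqlite3_xferopt_count counter so that it sits before the #ifndef SQLITE_OMIT_XFER_OPT block rather than inside it; that way the counter is still defined when the optimization is compiled out with -DSQLITE_OMIT_XFER_OPT, which is what the check-in comment is about. The comment also documents the statement shape the transfer optimization targets: INSERT INTO tab1 SELECT * FROM tab2 between identically declared tables. A minimal sketch of that shape through the Tcl interface follows; the table names and columns are illustrative only, and with SQLITE_OMIT_XFER_OPT the same SQL simply falls back to the ordinary row-by-row insert path.

   package require sqlite3
   sqlite3 db :memory:
   db eval {
     CREATE TABLE tab2(a INTEGER, b INTEGER, c TEXT);
     CREATE TABLE tab1(a INTEGER, b INTEGER, c TEXT);  -- declared identically to tab2
     INSERT INTO tab2 VALUES(1, 42, 'forty two');
   }
   # The statement form the optimization looks for; when the optimization is
   # omitted or inapplicable, the rows are just copied one by one instead.
   db eval {INSERT INTO tab1 SELECT * FROM tab2}
   db close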

Changes to test/all.test.

     6      6   #    May you do good and not evil.
     7      7   #    May you find forgiveness for yourself and forgive others.
     8      8   #    May you share freely, never taking more than you give.
     9      9   #
    10     10   #***********************************************************************
    11     11   # This file runs all tests.
    12     12   #
    13         -# $Id: all.test,v 1.37 2007/03/17 10:26:59 danielk1977 Exp $
           13  +# $Id: all.test,v 1.38 2007/03/27 12:04:06 drh Exp $
    14     14   
    15     15   set testdir [file dirname $argv0]
    16     16   source $testdir/tester.tcl
    17     17   rename finish_test really_finish_test
    18     18   proc finish_test {} {memleak_check}
    19     19   
    20     20   if {[file exists ./sqlite_test_count]} {
................................................................................
    53     53     crash.test
    54     54     crash2.test
    55     55     autovacuum_crash.test
    56     56     quick.test
    57     57     malloc.test
    58     58     misuse.test
    59     59     memleak.test
    60         -  speed1.test
    61     60   }
    62     61   
    63     62   # Files to include in the test.  If this list is empty then everything
    64     63   # that is not in the EXCLUDE list is run.
    65     64   #
    66     65   set INCLUDE {
    67     66   }
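
Dropping speed1.test from the EXCLUDE list above is what lets the speed test run as part of the full regression mentioned in the check-in comment. The INCLUDE list that follows works the other way around: when it is non-empty, only the listed files are run. A sketch of restricting an all.test run to a single file (the file name here is just an example):

   # Run only speed2.test instead of the whole suite.  Normally this list is
   # left empty so that everything not in EXCLUDE is run.
   set INCLUDE {
     speed2.test
   }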

Changes to test/speed2.test.

     7      7   #    May you find forgiveness for yourself and forgive others.
     8      8   #    May you share freely, never taking more than you give.
     9      9   #
    10     10   #*************************************************************************
    11     11   # This file implements regression tests for SQLite library.  The
    12     12   # focus of this script is measuring executing speed.
    13     13   #
    14         -# $Id: speed2.test,v 1.2 2007/03/26 16:30:16 drh Exp $
           14  +# $Id: speed2.test,v 1.3 2007/03/27 12:04:06 drh Exp $
    15     15   #
    16     16   
    17     17   set testdir [file dirname $argv0]
    18     18   source $testdir/tester.tcl
    19     19   
    20     20   # Set a uniform random seed
    21     21   expr srand(0)
    22     22   
    23         -set sqlout [open speed1.txt w]
           23  +set sqlout [open speed2.txt w]
    24     24   proc tracesql {sql} {
    25     25     puts $::sqlout $sql\;
    26     26   }
    27     27   #db trace tracesql
    28     28   
    29     29   # The number_name procedure below converts its argment (an integer)
    30     30   # into a string which is the English-language name for that number.
................................................................................
    58     58     set txt [string trim $txt]
    59     59     if {$txt==""} {set txt zero}
    60     60     return $txt
    61     61   }
    62     62   
    63     63   # Create a database schema.
    64     64   #
    65         -do_test speed1-1.0 {
           65  +do_test speed2-1.0 {
    66     66     execsql {
    67     67       PRAGMA page_size=1024;
    68     68       PRAGMA cache_size=8192;
    69     69       PRAGMA locking_mode=EXCLUSIVE;
    70     70       CREATE TABLE t1(a INTEGER, b INTEGER, c TEXT);
    71     71       CREATE TABLE t2(a INTEGER, b INTEGER, c TEXT);
    72     72       CREATE INDEX i2a ON t2(a);
................................................................................
    82     82   #
    83     83   set sql {}
    84     84   for {set i 1} {$i<=50000} {incr i} {
    85     85     set r [expr {int(rand()*500000)}]
    86     86     append sql "INSERT INTO t1 VALUES($i,$r,'[number_name $r]');\n"
    87     87   }
    88     88   db eval BEGIN
    89         -speed_trial speed1-insert1 50000 row $sql
           89  +speed_trial speed2-insert1 50000 row $sql
    90     90   db eval COMMIT
    91     91   
    92     92   # 50000 INSERTs on an indexed table
    93     93   #
    94     94   set sql {}
    95     95   for {set i 1} {$i<=50000} {incr i} {
    96     96     set r [expr {int(rand()*500000)}]
    97     97     append sql "INSERT INTO t2 VALUES($i,$r,'[number_name $r]');\n"
    98     98   }
    99     99   db eval BEGIN
   100         -speed_trial speed1-insert2 50000 row $sql
          100  +speed_trial speed2-insert2 50000 row $sql
   101    101   db eval COMMIT
   102    102   
   103    103   
   104    104   
   105    105   # 50 SELECTs on an integer comparison.  There is no index so
   106    106   # a full table scan is required.
   107    107   #
   108    108   set sql {}
   109    109   for {set i 0} {$i<50} {incr i} {
   110    110     set lwr [expr {$i*100}]
   111    111     set upr [expr {($i+10)*100}]
   112    112     append sql "SELECT count(*), avg(b) FROM t1 WHERE b>=$lwr AND b<$upr;"
   113    113   }
   114         -speed_trial speed1-select1 [expr {50*50000}] row $sql
          114  +speed_trial speed2-select1 [expr {50*50000}] row $sql
   115    115   
   116    116   # 50 SELECTs on an LIKE comparison.  There is no index so a full
   117    117   # table scan is required.
   118    118   #
   119    119   set sql {}
   120    120   for {set i 0} {$i<50} {incr i} {
   121    121     append sql \
   122    122       "SELECT count(*), avg(b) FROM t1 WHERE c LIKE '%[number_name $i]%';"
   123    123   }
   124         -speed_trial speed1-select2 [expr {50*50000}] row $sql
          124  +speed_trial speed2-select2 [expr {50*50000}] row $sql
   125    125   
   126    126   # Create indices
   127    127   #
   128    128   db eval BEGIN
   129         -speed_trial speed1-createidx 150000 row {
          129  +speed_trial speed2-createidx 150000 row {
   130    130     CREATE INDEX i1a ON t1(a);
   131    131     CREATE INDEX i1b ON t1(b);
   132    132     CREATE INDEX i1c ON t1(c);
   133    133   }
   134    134   db eval COMMIT
   135    135   
   136    136   # 5000 SELECTs on an integer comparison where the integer is
................................................................................
   138    138   #
   139    139   set sql {}
   140    140   for {set i 0} {$i<5000} {incr i} {
   141    141     set lwr [expr {$i*100}]
   142    142     set upr [expr {($i+10)*100}]
   143    143     append sql "SELECT count(*), avg(b) FROM t1 WHERE b>=$lwr AND b<$upr;"
   144    144   }
   145         -speed_trial speed1-select3 5000 stmt $sql
          145  +speed_trial speed2-select3 5000 stmt $sql
   146    146   
   147    147   # 100000 random SELECTs against rowid.
   148    148   #
   149    149   set sql {}
   150    150   for {set i 1} {$i<=100000} {incr i} {
   151    151     set id [expr {int(rand()*50000)+1}]
   152    152     append sql "SELECT c=='hi' FROM t1 WHERE rowid=$id;\n"
   153    153   }
   154         -speed_trial speed1-select4 100000 row $sql
          154  +speed_trial speed2-select4 100000 row $sql
   155    155   
   156    156   # 100000 random SELECTs against a unique indexed column.
   157    157   #
   158    158   set sql {}
   159    159   for {set i 1} {$i<=100000} {incr i} {
   160    160     set id [expr {int(rand()*50000)+1}]
   161    161     append sql "SELECT c FROM t1 WHERE a=$id;"
   162    162   }
   163         -speed_trial speed1-select5 100000 row $sql
          163  +speed_trial speed2-select5 100000 row $sql
   164    164   
   165    165   # 50000 random SELECTs against an indexed column text column
   166    166   #
   167    167   set sql {}
   168    168   db eval {SELECT c FROM t1 ORDER BY random() LIMIT 50000} {
   169    169     append sql "SELECT c FROM t1 WHERE c='$c';"
   170    170   }
   171         -speed_trial speed1-select6 50000 row $sql
          171  +speed_trial speed2-select6 50000 row $sql
   172    172   
   173    173   # Vacuum
   174         -speed_trial speed1-vacuum 100000 row VACUUM
          174  +speed_trial speed2-vacuum 100000 row VACUUM
   175    175   
   176    176   # 5000 updates of ranges where the field being compared is indexed.
   177    177   #
   178    178   set sql {}
   179    179   for {set i 0} {$i<5000} {incr i} {
   180    180     set lwr [expr {$i*2}]
   181    181     set upr [expr {($i+1)*2}]
   182    182     append sql "UPDATE t1 SET b=b*2 WHERE a>=$lwr AND a<$upr;"
   183    183   }
   184    184   db eval BEGIN
   185         -speed_trial speed1-update1 5000 stmt $sql
          185  +speed_trial speed2-update1 5000 stmt $sql
   186    186   db eval COMMIT
   187    187   
   188    188   # 50000 single-row updates.  An index is used to find the row quickly.
   189    189   #
   190    190   set sql {}
   191    191   for {set i 0} {$i<50000} {incr i} {
   192    192     set r [expr {int(rand()*500000)}]
   193    193     append sql "UPDATE t1 SET b=$r WHERE a=$i;"
   194    194   }
   195    195   db eval BEGIN
   196         -speed_trial speed1-update2 50000 row $sql
          196  +speed_trial speed2-update2 50000 row $sql
   197    197   db eval COMMIT
   198    198   
   199    199   # 1 big text update that touches every row in the table.
   200    200   #
   201         -speed_trial speed1-update3 50000 row {
          201  +speed_trial speed2-update3 50000 row {
   202    202     UPDATE t1 SET c=a;
   203    203   }
   204    204   
   205    205   # Many individual text updates.  Each row in the table is
   206    206   # touched through an index.
   207    207   #
   208    208   set sql {}
   209    209   for {set i 1} {$i<=50000} {incr i} {
   210    210     set r [expr {int(rand()*500000)}]
   211    211     append sql "UPDATE t1 SET c='[number_name $r]' WHERE a=$i;"
   212    212   }
   213    213   db eval BEGIN
   214         -speed_trial speed1-update4 50000 row $sql
          214  +speed_trial speed2-update4 50000 row $sql
   215    215   db eval COMMIT
   216    216   
   217    217   # Delete all content in a table.
   218    218   #
   219         -speed_trial speed1-delete1 50000 row {DELETE FROM t1}
          219  +speed_trial speed2-delete1 50000 row {DELETE FROM t1}
   220    220   
   221    221   # Copy one table into another
   222    222   #
   223         -speed_trial speed1-copy1 50000 row {INSERT INTO t1 SELECT * FROM t2}
          223  +speed_trial speed2-copy1 50000 row {INSERT INTO t1 SELECT * FROM t2}
   224    224   
   225    225   # Delete all content in a table, one row at a time.
   226    226   #
   227         -speed_trial speed1-delete2 50000 row {DELETE FROM t1 WHERE 1}
          227  +speed_trial speed2-delete2 50000 row {DELETE FROM t1 WHERE 1}
   228    228   
   229    229   # Refill the table yet again
   230    230   #
   231         -speed_trial speed1-copy2 50000 row {INSERT INTO t1 SELECT * FROM t2}
          231  +speed_trial speed2-copy2 50000 row {INSERT INTO t1 SELECT * FROM t2}
   232    232   
   233    233   # Drop the table and recreate it without its indices.
   234    234   #
   235    235   db eval BEGIN
   236         -speed_trial speed1-drop1 50000 row {
          236  +speed_trial speed2-drop1 50000 row {
   237    237      DROP TABLE t1;
   238    238      CREATE TABLE t1(a INTEGER, b INTEGER, c TEXT);
   239    239   }
   240    240   db eval COMMIT
   241    241   
   242    242   # Refill the table yet again.  This copy should be faster because
   243    243   # there are no indices to deal with.
   244    244   #
   245         -speed_trial speed1-copy3 50000 row {INSERT INTO t1 SELECT * FROM t2}
          245  +speed_trial speed2-copy3 50000 row {INSERT INTO t1 SELECT * FROM t2}
   246    246   
   247    247   # Select 20000 rows from the table at random.
   248    248   #
   249         -speed_trial speed1-random1 50000 row {
          249  +speed_trial speed2-random1 50000 row {
   250    250     SELECT rowid FROM t1 ORDER BY random() LIMIT 20000
   251    251   }
   252    252   
   253    253   # Delete 20000 random rows from the table.
   254    254   #
   255         -speed_trial speed1-random-del1 20000 row {
          255  +speed_trial speed2-random-del1 20000 row {
   256    256     DELETE FROM t1 WHERE rowid IN
   257    257       (SELECT rowid FROM t1 ORDER BY random() LIMIT 20000)
   258    258   }
   259         -do_test speed1-1.1 {
          259  +do_test speed2-1.1 {
   260    260     db one {SELECT count(*) FROM t1}
   261    261   } 30000
   262    262   
   263    263       
   264    264   # Delete 20000 more rows at random from the table.
   265    265   #
   266         -speed_trial speed1-random-del2 20000 row {
          266  +speed_trial speed2-random-del2 20000 row {
   267    267     DELETE FROM t1 WHERE rowid IN
   268    268       (SELECT rowid FROM t1 ORDER BY random() LIMIT 20000)
   269    269   }
   270         -do_test speed1-1.2 {
          270  +do_test speed2-1.2 {
   271    271     db one {SELECT count(*) FROM t1}
   272    272   } 10000
   273    273   
   274    274   finish_test

Added tool/mksqlite3c.tcl.

            1  +#!/usr/bin/tclsh
            2  +#
            3  +# To build a single huge source file holding all of SQLite (or at
            4  +# least the core components - the test harness, shell, and TCL 
            5  +# interface are omitted.) first do
            6  +#
            7  +#      make target_source
            8  +#
            9  +# Then run this script
           10  +#
           11  +#      tclsh mkonebigsourcefile.tcl
           12  +#
           13  +# The combined SQLite source code will be written into sqlite3.c
           14  +#
           15  +
           16  +# Open the output file and write a header comment at the beginning
           17  +# of the file.
           18  +#
           19  +set out [open sqlite3.c w]
           20  +puts $out \
           21  +"/******************************************************************************
           22  +** This file is a amalgamation of many separate source files from SQLite.  By
           23  +** pulling all the source files into this single unified source file, the
           24  +** entire code can be compiled as a single translation unit, which allows the
           25  +** compiler to do a better job of optimizing.
           26  +*/"
           27  +
           28  +# These are the header files used by SQLite.  The first time any of these 
           29  +# files are seen in a #include statement in the C code, include the complete
           30  +# text of the file in-line.  The file only needs to be included once.
           31  +#
           32  +foreach hdr {
           33  +   btree.h
           34  +   hash.h
           35  +   keywordhash.h
           36  +   opcodes.h
           37  +   os_common.h
           38  +   os.h
           39  +   os_os2.h
           40  +   pager.h
           41  +   parse.h
           42  +   sqlite3ext.h
           43  +   sqlite3.h
           44  +   sqliteInt.h
           45  +   vdbe.h
           46  +   vdbeInt.h
           47  +} {
           48  +  set available_hdr($hdr) 1
           49  +}
           50  +
           51  +# 78 stars used for comment formatting.
           52  +set s78 \
           53  +{*****************************************************************************}
           54  +
           55  +# Insert a comment into the code
           56  +#
           57  +proc section_comment {text} {
           58  +  global out s78
           59  +  set n [string length $text]
           60  +  set nstar [expr {60 - $n}]
           61  +  set stars [string range $s78 0 $nstar]
           62  +  puts $out "/************** $text $stars/"
           63  +}
           64  +
           65  +# Read the source file named $filename and write it into the
           66  +# sqlite3.c output file.  If any #include statements are seen,
           67  +# process them approprately.
           68  +#
           69  +proc copy_file {filename} {
           70  +  global seen_hdr available_hdr out
           71  +  set tail [file tail $filename]
           72  +  section_comment "Begin file $tail"
           73  +  set in [open $filename r]
           74  +  while {![eof $in]} {
           75  +    set line [gets $in]
           76  +    if {[regexp {^#\s*include\s+["<]([^">]+)[">]} $line all hdr]} {
           77  +      if {[info exists available_hdr($hdr)]} {
           78  +        if {$available_hdr($hdr)} {
           79  +          if {$hdr!="os_common.h"} {
           80  +            set available_hdr($hdr) 0
           81  +          }
           82  +          section_comment "Include $hdr in the middle of $tail"
           83  +          copy_file tsrc/$hdr
           84  +          section_comment "Continuing where we left off in $tail"
           85  +        }
           86  +      } elseif {![info exists seen_hdr($hdr)]} {
           87  +        set seen_hdr($hdr) 1
           88  +        puts $out $line
           89  +      }
           90  +    } elseif {[regexp {^#ifdef __cplusplus} $line]} {
           91  +      puts $out "#if 0"
           92  +    } elseif {[regexp {^#line} $line]} {
           93  +      # Skip #line directives.
           94  +    } else {
           95  +      puts $out $line
           96  +    }
           97  +  }
           98  +  close $in
           99  +  section_comment "End of $tail"
          100  +}
          101  +
          102  +
          103  +# Process the source files.  Process files containing commonly
          104  +# used subroutines first in order to help the compiler find
          105  +# inlining opportunities.
          106  +#
          107  +foreach file {
          108  +   printf.c
          109  +   random.c
          110  +   utf.c
          111  +   util.c
          112  +   hash.c
          113  +   opcodes.c
          114  +
          115  +   os.c
          116  +   os_os2.c
          117  +   os_unix.c
          118  +   os_win.c
          119  +
          120  +   pager.c
          121  +   
          122  +   btree.c
          123  +
          124  +   vdbefifo.c
          125  +   vdbemem.c
          126  +   vdbeaux.c
          127  +   vdbeapi.c
          128  +   vdbe.c
          129  +
          130  +   expr.c
          131  +   alter.c
          132  +   analyze.c
          133  +   attach.c
          134  +   auth.c
          135  +   build.c
          136  +   callback.c
          137  +   complete.c
          138  +   date.c
          139  +   delete.c
          140  +   func.c
          141  +   insert.c
          142  +   legacy.c
          143  +   loadext.c
          144  +   pragma.c
          145  +   prepare.c
          146  +   select.c
          147  +   table.c
          148  +   trigger.c
          149  +   update.c
          150  +   vacuum.c
          151  +   vtab.c
          152  +   where.c
          153  +
          154  +   parse.c
          155  +
          156  +   tokenize.c
          157  +
          158  +   main.c
          159  +} {
          160  +  copy_file tsrc/$file
          161  +}
          162  +
          163  +if 0 {
          164  +puts $out "#ifdef SQLITE_TEST"
          165  +foreach file {
          166  +   test1.c
          167  +   test2.c
          168  +   test3.c
          169  +   test4.c
          170  +   test5.c
          171  +   test6.c
          172  +   test7.c
          173  +   test8.c
          174  +   test_async.c
          175  +   test_autoext.c
          176  +   test_loadext.c
          177  +   test_md5.c
          178  +   test_schema.c
          179  +   test_server.c
          180  +   test_tclvar.c
          181  +} {
          182  +  copy_file ../sqlite/src/$file
          183  +}
          184  +puts $out "#endif /* SQLITE_TEST */"
          185  +puts $out "#ifdef SQLITE_TCL"
          186  +copy_file ../sqlite/src/tclsqlite.c
          187  +puts $out "#endif /* SQLITE_TCL */"
          188  +}
          189  +
          190  +close $out
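
The header comment of the new script gives the build recipe: run make target_source so the individual source files land in tsrc/, then run the script to concatenate them into sqlite3.c in the current directory. A hedged sketch of driving both steps from Tcl, assuming the current directory is the top of a configured build tree with tsrc/ and tool/mksqlite3c.tcl in place:

   # Sketch only: paths assume a build tree where "make target_source"
   # populates tsrc/ with the individual source and header files.
   exec make target_source >@ stdout 2>@ stderr
   # The script reads the files it needs from tsrc/ and writes the
   # single-file amalgamation to ./sqlite3.c.
   exec tclsh tool/mksqlite3c.tcl >@ stdout 2>@ stderr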