Diffstat (limited to 'test')
-rw-r--r--  test/bind.test      15
-rw-r--r--  test/tokenize.test  65
2 files changed, 79 insertions, 1 deletion
diff --git a/test/bind.test b/test/bind.test
index 075ffd10c..3b7499866 100644
--- a/test/bind.test
+++ b/test/bind.test
@@ -11,7 +11,7 @@
# This file implements regression tests for the SQLite library. The
# focus of this script is testing the sqlite_bind API.
#
-# $Id: bind.test,v 1.42 2008/04/16 16:11:49 drh Exp $
+# $Id: bind.test,v 1.43 2008/07/08 00:06:51 drh Exp $
#

set testdir [file dirname $argv0]
@@ -91,6 +91,19 @@ do_test bind-1.9 {
  execsql {SELECT rowid, * FROM t1}
} {1 123 abcdefg {} 2 456 abcdefg {}}
+do_test bind-1.10 {
+  set rc [catch {
+    sqlite3_prepare db {INSERT INTO t1 VALUES($abc:123,?,:abc)} -1 TAIL
+  } msg]
+  lappend rc $msg
+} {1 {(1) near ":123": syntax error}}
+do_test bind-1.11 {
+  set rc [catch {
+    sqlite3_prepare db {INSERT INTO t1 VALUES(@abc:xyz,?,:abc)} -1 TAIL
+  } msg]
+  lappend rc $msg
+} {1 {(1) near ":xyz": syntax error}}
+

do_test bind-1.99 {
  sqlite3_finalize $VM
} SQLITE_OK
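
The two new bind cases pin down how the tokenizer splits a malformed parameter name: a '$'- or '@'-style variable followed by a stray ':suffix' becomes two adjacent variable tokens, and the parser rejects the second one. For illustration only, the same prepare-time errors can be reproduced through any SQLite binding; the sketch below uses Python's standard sqlite3 module, which is my choice of harness and not part of this change:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE t1(a, b, c)")

# '$abc:123' tokenizes as the variable $abc followed by the variable
# :123; two adjacent variables are a parse error near the second one.
for sql in ("INSERT INTO t1 VALUES($abc:123,?,:abc)",
            "INSERT INTO t1 VALUES(@abc:xyz,?,:abc)"):
    try:
        con.execute(sql)
    except sqlite3.OperationalError as e:
        print(e)  # near ":123": syntax error, then near ":xyz": syntax error
con.close()

The "(1)" prefix in the expected test output is the numeric SQLITE_ERROR code that the Tcl test wrapper prepends; bindings such as Python surface only the message text.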
diff --git a/test/tokenize.test b/test/tokenize.test
new file mode 100644
index 000000000..46fd4eeb7
--- /dev/null
+++ b/test/tokenize.test
@@ -0,0 +1,65 @@
+# 2008 July 7
+#
+# The author disclaims copyright to this source code. In place of
+# a legal notice, here is a blessing:
+#
+# May you do good and not evil.
+# May you find forgiveness for yourself and forgive others.
+# May you share freely, never taking more than you give.
+#
+#***********************************************************************
+# This file implements regression tests for the SQLite library. The
+# focus of this script is testing the tokenizer.
+#
+# $Id: tokenize.test,v 1.1 2008/07/08 00:06:51 drh Exp $
+#
+
+set testdir [file dirname $argv0]
+source $testdir/tester.tcl
+
+do_test tokenize-1.1 {
+  catchsql {SELECT 1.0e+}
+} {1 {unrecognized token: "1.0e"}}
+do_test tokenize-1.2 {
+  catchsql {SELECT 1.0E+}
+} {1 {unrecognized token: "1.0E"}}
+do_test tokenize-1.3 {
+  catchsql {SELECT 1.0e-}
+} {1 {unrecognized token: "1.0e"}}
+do_test tokenize-1.4 {
+  catchsql {SELECT 1.0E-}
+} {1 {unrecognized token: "1.0E"}}
+do_test tokenize-1.5 {
+  catchsql {SELECT 1.0e+/}
+} {1 {unrecognized token: "1.0e"}}
+do_test tokenize-1.6 {
+  catchsql {SELECT 1.0E+:}
+} {1 {unrecognized token: "1.0E"}}
+do_test tokenize-1.7 {
+  catchsql {SELECT 1.0e-:}
+} {1 {unrecognized token: "1.0e"}}
+do_test tokenize-1.8 {
+  catchsql {SELECT 1.0E-/}
+} {1 {unrecognized token: "1.0E"}}
+do_test tokenize-1.9 {
+  catchsql {SELECT 1.0F+5}
+} {1 {unrecognized token: "1.0F"}}
+do_test tokenize-1.10 {
+  catchsql {SELECT 1.0d-10}
+} {1 {unrecognized token: "1.0d"}}
+do_test tokenize-1.11 {
+  catchsql {SELECT 1.0e,5}
+} {1 {unrecognized token: "1.0e"}}
+do_test tokenize-1.12 {
+  catchsql {SELECT 1.0E.10}
+} {1 {unrecognized token: "1.0E"}}
+
+do_test tokenize-2.1 {
+  catchsql {SELECT 1, 2 /*}
+} {1 {near "*": syntax error}}
+do_test tokenize-2.2 {
+  catchsql {SELECT 1, 2 /* }
+} {0 {1 2}}
+
+
+finish_test
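
Taken together, the tokenize-1.* cases pin down the rule that an exponent marker ('e' or 'E') must be followed by at least one digit, optionally preceded by a sign; otherwise the trailing letters are folded into the numeric literal and the whole thing is reported as a single unrecognized token. The tokenize-2.* cases check comment handling at end of input: a bare '/*' is tokenized as '/' followed by '*' (hence the syntax error), while '/* ' with even one character after it is an unterminated comment that the tokenizer treats as whitespace. For illustration only, both rules can be observed through any SQLite binding; the sketch below again uses Python's standard sqlite3 module (my choice, not part of the commit):

import sqlite3

con = sqlite3.connect(":memory:")

# Malformed exponents: with no digit after 'e'/'E' (or after the
# optional sign), the letters are absorbed into one illegal token.
for sql in ("SELECT 1.0e+", "SELECT 1.0E-/", "SELECT 1.0F+5"):
    try:
        con.execute(sql)
    except sqlite3.OperationalError as e:
        print(sql, "->", e)  # e.g. SELECT 1.0e+ -> unrecognized token: "1.0e"

# A bare '/*' at end of input is '/' then '*', so parsing fails...
try:
    con.execute("SELECT 1, 2 /*")
except sqlite3.OperationalError as e:
    print(e)  # near "*": syntax error

# ...but '/* ' (note the trailing space) is an unterminated comment,
# which counts as whitespace, so the statement runs normally.
print(con.execute("SELECT 1, 2 /* ").fetchone())  # (1, 2)
con.close()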