From 200a81dcb51bb5c02625a4fe6b6dabbf630a069c Mon Sep 17 00:00:00 2001
From: drh
Date: Fri, 8 Aug 2008 14:19:41 +0000
Subject: Disallow the ON CONFLICT clause on CHECK constraints. The syntax used
 to be allowed but never worked, so this should not present compatibility
 problems. Other internal grammar simplifications. (CVS 5546)

FossilOrigin-Name: 4cedc641ed39982ae8cbb9200aa1e2f37c878b73
---
 src/alter.c    |  4 ++--
 src/parse.y    | 12 +++++-------
 src/tokenize.c |  9 ++++-----
 3 files changed, 11 insertions(+), 14 deletions(-)

(limited to 'src')

diff --git a/src/alter.c b/src/alter.c
index ba929bedd..ec315da48 100644
--- a/src/alter.c
+++ b/src/alter.c
@@ -12,7 +12,7 @@
 ** This file contains C code routines that used to generate VDBE code
 ** that implements the ALTER TABLE command.
 **
-** $Id: alter.c,v 1.47 2008/07/28 19:34:53 drh Exp $
+** $Id: alter.c,v 1.48 2008/08/08 14:19:41 drh Exp $
 */
 #include "sqliteInt.h"
 #include <ctype.h>
@@ -74,7 +74,7 @@ static void renameTableFunc(
       do {
         zCsr += len;
         len = sqlite3GetToken(zCsr, &token);
-      } while( token==TK_SPACE || token==TK_COMMENT );
+      } while( token==TK_SPACE );
       assert( len>0 );
     } while( token!=TK_LP && token!=TK_USING );
 
diff --git a/src/parse.y b/src/parse.y
index b2cbe9bca..8b6d1632e 100644
--- a/src/parse.y
+++ b/src/parse.y
@@ -14,7 +14,7 @@
 ** the parser. Lemon will also generate a header file containing
 ** numeric codes for all of the tokens.
 **
-** @(#) $Id: parse.y,v 1.248 2008/07/31 01:40:42 shane Exp $
+** @(#) $Id: parse.y,v 1.249 2008/08/08 14:19:41 drh Exp $
 */
 
 // All token codes are small integers with #defines that begin with "TK_"
@@ -91,7 +91,6 @@ struct AttachKey { int type; Token key; };
 input ::= cmdlist.
 cmdlist ::= cmdlist ecmd.
 cmdlist ::= ecmd.
-cmdx ::= cmd.           { sqlite3FinishCoding(pParse); }
 ecmd ::= SEMI.
 ecmd ::= explain cmdx SEMI.
 explain ::= .           { sqlite3BeginParse(pParse, 0); }
@@ -99,6 +98,7 @@ explain ::= .           { sqlite3BeginParse(pParse, 0); }
 explain ::= EXPLAIN.    { sqlite3BeginParse(pParse, 1); }
 explain ::= EXPLAIN QUERY PLAN.   { sqlite3BeginParse(pParse, 2); }
 %endif  SQLITE_OMIT_EXPLAIN
+cmdx ::= cmd.           { sqlite3FinishCoding(pParse); }
 
 ///////////////////// Begin and end transactions. ////////////////////////////
 //
@@ -313,7 +313,7 @@ tcons ::= PRIMARY KEY LP idxlist(X) autoinc(I) RP onconf(R).
                                  {sqlite3AddPrimaryKey(pParse,X,R,I,0);}
 tcons ::= UNIQUE LP idxlist(X) RP onconf(R).
                                  {sqlite3CreateIndex(pParse,0,0,0,X,R,0,0,0,0);}
-tcons ::= CHECK LP expr(E) RP onconf. {sqlite3AddCheckConstraint(pParse,E);}
+tcons ::= CHECK LP expr(E) RP.        {sqlite3AddCheckConstraint(pParse,E);}
 tcons ::= FOREIGN KEY LP idxlist(FA) RP
           REFERENCES nm(T) idxlist_opt(TA) refargs(R) defer_subclause_opt(D). {
     sqlite3CreateForeignKey(pParse, FA, &T, TA, R);
@@ -885,11 +885,10 @@ uniqueflag(A) ::= . {A = OE_None;}
 %destructor idxlist {sqlite3ExprListDelete(pParse->db, $$);}
 %type idxlist_opt {ExprList*}
 %destructor idxlist_opt {sqlite3ExprListDelete(pParse->db, $$);}
-%type idxitem {Token}
 
 idxlist_opt(A) ::= .                         {A = 0;}
 idxlist_opt(A) ::= LP idxlist(X) RP.         {A = X;}
-idxlist(A) ::= idxlist(X) COMMA idxitem(Y) collate(C) sortorder(Z). {
+idxlist(A) ::= idxlist(X) COMMA nm(Y) collate(C) sortorder(Z). {
   Expr *p = 0;
   if( C.n>0 ){
     p = sqlite3PExpr(pParse, TK_COLUMN, 0, 0, 0);
@@ -899,7 +898,7 @@ idxlist(A) ::= idxlist(X) COMMA idxitem(Y) collate(C) sortorder(Z). {
   sqlite3ExprListCheckLength(pParse, A, "index");
   if( A ) A->a[A->nExpr-1].sortOrder = Z;
 }
-idxlist(A) ::= idxitem(Y) collate(C) sortorder(Z). {
+idxlist(A) ::= nm(Y) collate(C) sortorder(Z). {
   Expr *p = 0;
   if( C.n>0 ){
     p = sqlite3PExpr(pParse, TK_COLUMN, 0, 0, 0);
@@ -909,7 +908,6 @@ idxlist(A) ::= idxitem(Y) collate(C) sortorder(Z). {
   sqlite3ExprListCheckLength(pParse, A, "index");
   if( A ) A->a[A->nExpr-1].sortOrder = Z;
 }
-idxitem(A) ::= nm(X). {A = X;}
 
 %type collate {Token}
 collate(C) ::= . {C.z = 0; C.n = 0;}
diff --git a/src/tokenize.c b/src/tokenize.c
index eaca6b53b..3e5bd9de9 100644
--- a/src/tokenize.c
+++ b/src/tokenize.c
@@ -15,7 +15,7 @@
 ** individual tokens and sends those tokens one-by-one over to the
 ** parser for analysis.
 **
-** $Id: tokenize.c,v 1.149 2008/08/07 13:05:36 drh Exp $
+** $Id: tokenize.c,v 1.150 2008/08/08 14:19:41 drh Exp $
 */
 #include "sqliteInt.h"
 #include <ctype.h>
@@ -131,7 +131,7 @@ int sqlite3GetToken(const unsigned char *z, int *tokenType){
     case '-': {
       if( z[1]=='-' ){
         for(i=2; (c=z[i])!=0 && c!='\n'; i++){}
-        *tokenType = TK_COMMENT;
+        *tokenType = TK_SPACE;
         return i;
       }
       *tokenType = TK_MINUS;
@@ -164,7 +164,7 @@ int sqlite3GetToken(const unsigned char *z, int *tokenType){
       }
       for(i=3, c=z[2]; (c!='*' || z[i]!='/') && (c=z[i])!=0; i++){}
       if( c ) i++;
-      *tokenType = TK_COMMENT;
+      *tokenType = TK_SPACE;
       return i;
     }
     case '%': {
@@ -420,8 +420,7 @@ int sqlite3RunParser(Parse *pParse, const char *zSql, char **pzErrMsg){
       break;
     }
     switch( tokenType ){
-      case TK_SPACE:
-      case TK_COMMENT: {
+      case TK_SPACE: {
         if( db->u1.isInterrupted ){
           pParse->rc = SQLITE_INTERRUPT;
           sqlite3SetString(pzErrMsg, db, "interrupt");
-- 
cgit v1.2.3
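
For illustration only, and not part of the patch above: a minimal C sketch of the
user-visible effect of the grammar change, assuming a SQLite build that includes
this check-in. The table names t1 and t2 are arbitrary; only public API calls
(sqlite3_open, sqlite3_exec, sqlite3_free, sqlite3_close) are used. A bare CHECK
constraint still parses, while a CHECK constraint followed by an ON CONFLICT
clause should now be rejected with a syntax error when the statement is prepared.

/* Sketch: demonstrate that CHECK ... ON CONFLICT is no longer accepted.
** Build with something like: cc demo.c -lsqlite3 */
#include <stdio.h>
#include <sqlite3.h>

int main(void){
  sqlite3 *db;
  char *zErr = 0;

  sqlite3_open(":memory:", &db);

  /* A plain CHECK constraint is still accepted by the grammar. */
  if( sqlite3_exec(db, "CREATE TABLE t1(x INTEGER, CHECK(x>0));",
                   0, 0, &zErr)!=SQLITE_OK ){
    printf("unexpected error: %s\n", zErr);
    sqlite3_free(zErr);
  }

  /* With this change the ON CONFLICT clause is no longer part of the
  ** CHECK-constraint grammar, so this statement should fail to parse. */
  if( sqlite3_exec(db, "CREATE TABLE t2(x INTEGER, CHECK(x>0) ON CONFLICT ROLLBACK);",
                   0, 0, &zErr)!=SQLITE_OK ){
    printf("rejected as expected: %s\n", zErr);
    sqlite3_free(zErr);
  }

  sqlite3_close(db);
  return 0;
}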