/*
 * Copyright 2015-2018 HuntLabs.cn
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
module hunt.sql.dialect.postgresql.parser.PGExprParser;

import hunt.sql.ast.SQLDataType;
import hunt.sql.ast.SQLExpr;
import hunt.sql.ast.expr;
import hunt.sql.dialect.postgresql.ast.expr.PGBoxExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGCidrExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGCircleExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGDateField;
import hunt.sql.dialect.postgresql.ast.expr.PGExtractExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGInetExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGLineSegmentsExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGMacAddrExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGPointExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGPolygonExpr;
import hunt.sql.dialect.postgresql.ast.expr.PGTypeCastExpr;
import hunt.sql.parser.Lexer;
import hunt.sql.parser.SQLExprParser;
import hunt.sql.parser.SQLParserFeature;
import hunt.sql.parser.Token;
import hunt.sql.util.FnvHash;
import hunt.sql.util.DBType;
import hunt.sql.dialect.postgresql.parser.PGSelectParser;
import hunt.sql.dialect.postgresql.parser.PGLexer;
import std.uni;
//import hunt.lang;
import hunt.collection;
import hunt.String;
import hunt.sql.util.Utils;
import hunt.text;

import std.concurrency : initOnce;

public class PGExprParser : SQLExprParser {

    // public static string[] AGGREGATE_FUNCTIONS;

    // public static long[] AGGREGATE_FUNCTIONS_CODES;

    // static this(){
    //     string[] strings = [ "AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER" ];
    //     AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
    //     AGGREGATE_FUNCTIONS = new string[AGGREGATE_FUNCTIONS_CODES.length];
    //     foreach(string str ; strings) {
    //         long hash = FnvHash.fnv1a_64_lower(str);
    //         int index = search(AGGREGATE_FUNCTIONS_CODES, hash);
    //         AGGREGATE_FUNCTIONS[index] = str;
    //     }
    // }

    private enum string[] strings = [ "AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER" ];

    // Lazily initialized, thread-safe table of aggregate function names,
    // ordered to match the sorted hash codes in AGGREGATE_FUNCTIONS_CODES.
    static string[] AGGREGATE_FUNCTIONS() {
        __gshared string[] inst;
        return initOnce!inst({
            long[] codes = AGGREGATE_FUNCTIONS_CODES();
            string[] r = new string[codes.length];

            foreach(string str ; strings) {
                long hash = FnvHash.fnv1a_64_lower(str);
                int index = search(codes, hash);
                r[index] = str;
            }
            return r;
        }());
    }

    static long[] AGGREGATE_FUNCTIONS_CODES() {
        __gshared long[] inst;
        return initOnce!inst({
            return FnvHash.fnv1a_64_lower(strings, true);
        }());
    }

    public this(string sql){
        this(new PGLexer(sql));
        this.lexer.nextToken();
        this.dbType = DBType.POSTGRESQL.name;
    }

    public this(string sql, SQLParserFeature[] features...){
        // Note: the parser features are currently not forwarded to the lexer.
        this(new PGLexer(sql));
        this.lexer.nextToken();
        this.dbType = DBType.POSTGRESQL.name;
    }

    public this(Lexer lexer){
        super(lexer);
        this.aggregateFunctions = AGGREGATE_FUNCTIONS;
        this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
        this.dbType = DBType.POSTGRESQL.name;
    }

    override
    public SQLDataType parseDataType() {
        if (lexer.token() == Token.TYPE) {
            lexer.nextToken();
        }
        return super.parseDataType();
    }

    override public PGSelectParser createSelectParser() {
        return new PGSelectParser(this);
    }

    override public SQLExpr primary() {
        if (lexer.token() == Token.ARRAY) {
            string ident = lexer.stringVal();
            lexer.nextToken();

            if (lexer.token() == Token.LPAREN) {
                SQLIdentifierExpr array = new SQLIdentifierExpr(ident);
                return this.methodRest(array, true);
            } else {
                SQLArrayExpr array = new SQLArrayExpr();
                array.setExpr(new SQLIdentifierExpr(ident));
                accept(Token.LBRACKET);
                this.exprList(array.getValues(), array);
                accept(Token.RBRACKET);
                return primaryRest(array);
            }

        } else if (lexer.token() == Token.POUND) {
            lexer.nextToken();
            if (lexer.token() == Token.LBRACE) {
                // #{name} variable placeholder
                lexer.nextToken();
                string varName = lexer.stringVal();
                lexer.nextToken();
                accept(Token.RBRACE);
                SQLVariantRefExpr expr = new SQLVariantRefExpr("#{" ~ varName ~ "}");
                return primaryRest(expr);
            } else {
                SQLExpr value = this.primary();
                SQLUnaryExpr expr = new SQLUnaryExpr(SQLUnaryOperator.Pound, value);
                return primaryRest(expr);
            }
        } else if (lexer.token() == Token.VALUES) {
            lexer.nextToken();

            SQLValuesExpr values = new SQLValuesExpr();
            for (;;) {
                accept(Token.LPAREN);
                SQLListExpr listExpr = new SQLListExpr();
                exprList(listExpr.getItems(), listExpr);
                accept(Token.RPAREN);

                listExpr.setParent(values);

                values.getValues().add(listExpr);

                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                    continue;
                }
                break;
            }
            return values;
        }

        return super.primary();
    }

    override
    protected SQLExpr parseInterval() {
        accept(Token.INTERVAL);
        SQLIntervalExpr intervalExpr = new SQLIntervalExpr();
        if (lexer.token() != Token.LITERAL_CHARS) {
            // INTERVAL not followed by a string literal is treated as a plain identifier.
            return new SQLIdentifierExpr("INTERVAL");
        }
        intervalExpr.setValue(new SQLCharExpr(lexer.stringVal()));
        lexer.nextToken();
        return intervalExpr;
    }

    override public SQLExpr primaryRest(SQLExpr expr) {
        if (lexer.token() == Token.COLONCOLON) {
            // PostgreSQL '::' cast, e.g. expr::type
            lexer.nextToken();
            SQLDataType dataType = this.parseDataType();

            PGTypeCastExpr castExpr = new PGTypeCastExpr();

            castExpr.setExpr(expr);
            castExpr.setDataType(dataType);

            return primaryRest(castExpr);
        }

        if (lexer.token() == Token.LBRACKET) {
            // expr[ ... ] is parsed as an SQLArrayExpr.
            SQLArrayExpr array = new SQLArrayExpr();
            array.setExpr(expr);
            lexer.nextToken();
            this.exprList(array.getValues(), array);
            accept(Token.RBRACKET);
            return primaryRest(array);
        }

        if (typeid(expr) == typeid(SQLIdentifierExpr)) {
            string ident = (cast(SQLIdentifierExpr)expr).getName();

            if (lexer.token() == Token.COMMA || lexer.token() == Token.RPAREN) {
                return super.primaryRest(expr);
            }

            if ("TIMESTAMP".equalsIgnoreCase(ident)) {
                if (lexer.token() != Token.LITERAL_ALIAS //
                    && lexer.token() != Token.LITERAL_CHARS //
                    && lexer.token() != Token.WITH) {
                    return super.primaryRest(
                            new SQLIdentifierExpr(ident));
                }

                SQLTimestampExpr timestamp = new SQLTimestampExpr();

                if (lexer.token() == Token.WITH) {
                    lexer.nextToken();
                    acceptIdentifier("TIME");
                    acceptIdentifier("ZONE");
                    timestamp.setWithTimeZone(true);
                }

                string literal = lexer.stringVal();
                timestamp.setLiteral(literal);
                accept(Token.LITERAL_CHARS);

                if (lexer.identifierEquals("AT")) {
                    lexer.nextToken();
                    acceptIdentifier("TIME");
                    acceptIdentifier("ZONE");

                    string timezone = lexer.stringVal();
                    timestamp.setTimeZone(timezone);
                    accept(Token.LITERAL_CHARS);
                }

                return primaryRest(timestamp);
            } else if ("TIMESTAMPTZ".equalsIgnoreCase(ident)) {
                if (lexer.token() != Token.LITERAL_ALIAS //
                    && lexer.token() != Token.LITERAL_CHARS //
                    && lexer.token() != Token.WITH) {
                    return super.primaryRest(
                            new SQLIdentifierExpr(ident));
                }

                SQLTimestampExpr timestamp = new SQLTimestampExpr();
                timestamp.setWithTimeZone(true);

                string literal = lexer.stringVal();
                timestamp.setLiteral(literal);
                accept(Token.LITERAL_CHARS);

                if (lexer.identifierEquals("AT")) {
                    lexer.nextToken();
                    acceptIdentifier("TIME");
                    acceptIdentifier("ZONE");

                    string timezone = lexer.stringVal();
                    timestamp.setTimeZone(timezone);
                    accept(Token.LITERAL_CHARS);
                }

                return primaryRest(timestamp);
            } else if ("EXTRACT".equalsIgnoreCase(ident)) {
                accept(Token.LPAREN);

                PGExtractExpr extract = new PGExtractExpr();

                string fieldName = lexer.stringVal();
                PGDateField field = PGDateField(toUpper(fieldName));
                lexer.nextToken();

                extract.setField(field);

                accept(Token.FROM);
                SQLExpr source = this.expr();

                extract.setSource(source);

                accept(Token.RPAREN);

                return primaryRest(extract);
            } else if ("POINT".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGPointExpr point = new PGPointExpr();
                point.setValue(value);
                return primaryRest(point);
            } else if ("BOX".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGBoxExpr box = new PGBoxExpr();
                box.setValue(value);
                return primaryRest(box);
            } else if ("macaddr".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGMacAddrExpr macaddr = new PGMacAddrExpr();
                macaddr.setValue(value);
                return primaryRest(macaddr);
            } else if ("inet".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGInetExpr inet = new PGInetExpr();
                inet.setValue(value);
                return primaryRest(inet);
            } else if ("cidr".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGCidrExpr cidr = new PGCidrExpr();
                cidr.setValue(value);
                return primaryRest(cidr);
            } else if ("polygon".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGPolygonExpr polygon = new PGPolygonExpr();
                polygon.setValue(value);
                return primaryRest(polygon);
            } else if ("circle".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGCircleExpr circle = new PGCircleExpr();
                circle.setValue(value);
                return primaryRest(circle);
            } else if ("lseg".equalsIgnoreCase(ident)) {
                SQLExpr value = this.primary();
                PGLineSegmentsExpr lseg = new PGLineSegmentsExpr();
                lseg.setValue(value);
                return primaryRest(lseg);
            } else if (equalsIgnoreCase(ident, "b") && lexer.token() == Token.LITERAL_CHARS) {
                string charValue = lexer.stringVal();
                lexer.nextToken();
                expr = new SQLBinaryExpr(charValue);

                return primaryRest(expr);
            }
        }

        return super.primaryRest(expr);
    }

    override
    protected string alias_f() {
        string _alias = super.alias_f();
        if (_alias !is null) {
            return _alias;
        }
        // Some keywords are not treated as keywords in alias position; they still act as aliases.
        switch (lexer.token()) {
            case Token.INTERSECT:
                // See the implementation of SQLParser::alias() for details.
                _alias = lexer.stringVal();
                lexer.nextToken();
                return _alias;
            // TODO other cases
            default:
                break;
        }
        return _alias;
    }

    override protected void filter(SQLAggregateExpr x) {
        // Aggregate FILTER clause: agg(...) FILTER (WHERE condition)
        if (lexer.identifierEquals(FnvHash.Constants.FILTER)) {
            lexer.nextToken();
            accept(Token.LPAREN);
            accept(Token.WHERE);
            SQLExpr filter = this.expr();
            accept(Token.RPAREN);
            x.setFilter(filter);
        }
    }
}
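
// Illustrative usage sketch (not part of the original source): a unittest showing
// how the '::' cast handled in primaryRest() surfaces to callers. It assumes the
// inherited expr() entry point (used within this module) is publicly accessible;
// the input SQL string is a made-up example.
unittest {
    PGExprParser parser = new PGExprParser("price::numeric");
    SQLExpr parsed = parser.expr();

    // The cast should be represented as a PGTypeCastExpr wrapping the identifier.
    PGTypeCastExpr castExpr = cast(PGTypeCastExpr) parsed;
    assert(castExpr !is null);
}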