svn-gvsig-desktop / trunk / org.gvsig.desktop / org.gvsig.desktop.library / org.gvsig.expressionevaluator / org.gvsig.expressionevaluator.lib / org.gvsig.expressionevaluator.lib.impl / src / main / java / org / gvsig / expressionevaluator / impl / DefaultCompiler.java @ 44361
History | View | Annotate | Download (20.5 KB)
1 |
package org.gvsig.expressionevaluator.impl; |
---|---|
2 |
|
3 |
import java.util.HashMap; |
4 |
import java.util.Map; |
5 |
import org.apache.commons.lang3.StringUtils; |
6 |
import org.gvsig.expressionevaluator.Compiler; |
7 |
import org.gvsig.expressionevaluator.LexicalAnalyzer; |
8 |
import org.gvsig.expressionevaluator.LexicalAnalyzer.Token; |
9 |
import org.gvsig.expressionevaluator.Code; |
10 |
import org.gvsig.expressionevaluator.CodeBuilder; |
11 |
import org.gvsig.expressionevaluator.Codes; |
12 |
import org.gvsig.expressionevaluator.ExpressionSyntaxException; |
13 |
import org.gvsig.expressionevaluator.GrammarSet; |
14 |
import org.gvsig.expressionevaluator.Statement; |
15 |
import org.gvsig.expressionevaluator.Statement.StatementContext; |
16 |
import org.gvsig.expressionevaluator.impl.DefaultCodeBuilder.BaseCodes; |
17 |
import org.gvsig.expressionevaluator.impl.DefaultCodeBuilder.BaseConstant; |
18 |
import org.gvsig.expressionevaluator.impl.function.operator.NegOperator; |
19 |
|
20 |
public class DefaultCompiler implements Compiler { |
21 |
|
22 |
class DefaultStatementContext implements StatementContext { |
23 |
|
24 |
private String codeClassifier; |
25 |
private Map<String,Code> codes; |
26 |
|
27 |
@Override
|
28 |
public Compiler getCompiler() { |
29 |
return DefaultCompiler.this;
|
30 |
} |
31 |
|
32 |
@Override
|
33 |
public LexicalAnalyzer getLexicalAnalyzer() {
|
34 |
return lexer;
|
35 |
} |
36 |
|
37 |
@Override
|
38 |
public void setCode(String id, Code code) { |
39 |
if( this.codes == null ) { |
40 |
this.codes = new HashMap<>(); |
41 |
} |
42 |
if( !StringUtils.isBlank(this.codeClassifier) ) { |
43 |
if( id.contains("#") ) { |
44 |
id = StringUtils.replace(id,"#",this.codeClassifier,1); |
45 |
} |
46 |
} |
47 |
this.codes.put(id, code);
|
48 |
} |
49 |
|
50 |
public Code getCode(String id) { |
51 |
return this.codes.get(id); |
52 |
} |
53 |
|
54 |
@Override
|
55 |
public void setCodeClassifier(String classifier) { |
56 |
this.codeClassifier = classifier;
|
57 |
} |
58 |
|
59 |
@Override
|
60 |
public String getCodeClassifier() { |
61 |
return this.codeClassifier; |
62 |
} |
63 |
|
64 |
@Override
|
65 |
public CodeBuilder getCodeBuilder() {
|
66 |
return codeBuilder;
|
67 |
} |
68 |
|
69 |
@Override
|
70 |
public Token look_token() {
|
71 |
return lexer.look();
|
72 |
} |
73 |
|
74 |
@Override
|
75 |
public Token next_token() {
|
76 |
return lexer.next();
|
77 |
} |
78 |
|
79 |
@Override
|
80 |
public Code parse_expression() {
|
81 |
return DefaultCompiler.this.parse_expression();
|
82 |
} |
83 |
|
84 |
@Override
|
85 |
public Codes parse_expressions(String separator) { |
86 |
return DefaultCompiler.this.parse_expressions(separator);
|
87 |
} |
88 |
|
89 |
} |
90 |
|
91 |
// Whether "obj.attr" / "obj.method(...)" syntax is accepted (see parse_getattr()).
private boolean objectAccessSupported;
// Tokenizer for the expression source; replaceable via setLexicalAnalyzer().
private LexicalAnalyzer lexer;
// Factory used to build the resulting Code tree; replaceable via setCodeBuilder().
private CodeBuilder codeBuilder;
// Pluggable grammars providing extra statements (consulted in parse_grammars()).
private final GrammarSet grammars;
//
// https://www.postgresql.org/docs/9.1/static/functions.html
//
|
98 |
|
99 |
/**
 * Creates a compiler with its default collaborators: a SQL-flavoured
 * lexical analyzer, the standard code builder, and an empty grammar set.
 * Object access syntax ("a.b", "a.m()") is enabled by default.
 */
public DefaultCompiler() {
    this.objectAccessSupported = true;
    this.codeBuilder = new DefaultCodeBuilder();
    this.lexer = new SQLLexicalAnalyzer();
    this.grammars = new DefaultGrammarSet();
}
105 |
|
106 |
@Override
|
107 |
public Compiler clone() throws CloneNotSupportedException { |
108 |
DefaultCompiler other = (DefaultCompiler) super.clone();
|
109 |
other.lexer = lexer.clone(); |
110 |
other.codeBuilder = codeBuilder.clone(); |
111 |
|
112 |
return other;
|
113 |
} |
114 |
|
115 |
// Replaces the lexical analyzer used to tokenize expression sources.
@Override
public void setLexicalAnalyzer(LexicalAnalyzer lexer) {
    this.lexer = lexer;
}

// Returns the lexical analyzer currently in use.
@Override
public LexicalAnalyzer getLexicalAnalyzer() {
    return this.lexer;
}

// Replaces the factory used to build the Code tree while parsing.
@Override
public void setCodeBuilder(CodeBuilder codeBuilder) {
    this.codeBuilder = codeBuilder;
}

// Returns the code builder currently in use.
@Override
public CodeBuilder getCodeBuilder() {
    return this.codeBuilder;
}

// Whether "obj.attr"/"obj.method(...)" syntax is accepted by the parser.
@Override
public boolean isObjectAccessSupported() {
    return this.objectAccessSupported;
}

// Enables or disables object access syntax (checked in parse_getattr()).
@Override
public void setObjectAccessSupported(boolean objectAccessSupported) {
    this.objectAccessSupported = objectAccessSupported;
}

// Returns the (mutable) set of pluggable grammars used by parse_grammars().
@Override
public GrammarSet getGrammars() {
    return this.grammars;
}
149 |
|
150 |
@Override
|
151 |
public Code compileExpression(String expression) { |
152 |
this.lexer.setSource(expression.trim());
|
153 |
Code code = parse_expression(); |
154 |
if( !this.lexer.isEOF() ) { |
155 |
throw new ExpressionSyntaxException(lexer); |
156 |
} |
157 |
return code;
|
158 |
} |
159 |
|
160 |
public Code parse_expression() {
|
161 |
Code code = parse_relational(); |
162 |
return code;
|
163 |
} |
164 |
|
165 |
public Code parse_relational() {
|
166 |
Code op1 = parse_not(); |
167 |
Code op2; |
168 |
while( true ) { |
169 |
Token token = lexer.look(); |
170 |
switch( token.getType() ) {
|
171 |
case Token.OP_OR:
|
172 |
lexer.next(); |
173 |
op2 = parse_not(); |
174 |
if( op2==null ) { |
175 |
throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_OR_operator(),lexer); |
176 |
} |
177 |
op1 = codeBuilder.or(op1, op2); |
178 |
break;
|
179 |
case Token.OP_AND:
|
180 |
lexer.next(); |
181 |
op2 = parse_not(); |
182 |
if( op2==null ) { |
183 |
throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_AND_operator(),lexer); |
184 |
} |
185 |
op1 = codeBuilder.and(op1, op2); |
186 |
break;
|
187 |
default:
|
188 |
return op1;
|
189 |
} |
190 |
} |
191 |
} |
192 |
|
193 |
public Code parse_not() {
|
194 |
Code op1; |
195 |
Token token = lexer.look(); |
196 |
if( token.getType() == Token.OP_NOT ) {
|
197 |
lexer.next(); |
198 |
op1 = parse_conditional(); |
199 |
op1 = codeBuilder.not(op1); |
200 |
} else {
|
201 |
op1 = parse_conditional(); |
202 |
} |
203 |
return op1;
|
204 |
} |
205 |
|
206 |
/**
 * Parses left-associative chains of comparison and predicate operators:
 * {@code < > <= >= = <>}, {@code IS [NOT] [NULL]}, {@code ISNULL},
 * {@code NOTNULL}, {@code ~} (regexp), {@code LIKE} and {@code ILIKE}.
 * Operands are parsed at the additive level (parse_sum). Any other token
 * ends the chain and the accumulated code is returned.
 */
public Code parse_conditional() {
    Code op1 = parse_sum();
    Code op2;
    while( true ) {
        Token token = lexer.look();
        switch( token.getType() ) {
        case Token.OP_LT:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_LT_operator(),lexer);
            }
            op1 = codeBuilder.lt(op1, op2);
            break;
        case Token.OP_GT:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_GT_operator(),lexer);
            }
            op1 = codeBuilder.gt(op1, op2);
            break;
        case Token.OP_LE:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_LE_operator(),lexer);
            }
            op1 = codeBuilder.le(op1, op2);
            break;
        case Token.OP_GE:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_GE_operator(),lexer);
            }
            op1 = codeBuilder.ge(op1, op2);
            break;
        case Token.OP_EQ:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                // Special-cased diagnostics: "=>" or "=<" are common typos
                // for ">=" / "<=", so attach a hint when the token after
                // "=" is ">" or "<".
                token = lexer.look();
                String tip = null;
                switch(token.getType()) {
                case Token.OP_GT:
                    tip = I18N.The_operator_greater_than_or_equal_is_ge();
                    break;
                case Token.OP_LT:
                    // NOTE(review): this calls ..._is_ge() for the "<" case;
                    // by symmetry a ..._is_le() message looks intended —
                    // confirm the I18N key before changing.
                    tip = I18N.The_operator_less_than_or_equal_is_ge();
                    break;
                }
                throw new ExpressionSyntaxException(
                        I18N.Cant_recognize_the_second_operand_of_EQ_operator(),
                        lexer,
                        tip
                );
            }
            op1 = codeBuilder.eq(op1, op2);
            break;
        case Token.OP_NE:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_NEQ_operator(),lexer);
            }
            op1 = codeBuilder.ne(op1, op2);
            break;
        case Token.PRED_IS: {
            // IS predicate, four forms:
            //   IS NOTNULL       -> NOT(x IS NULL)
            //   IS NOT NULL      -> NOT(x IS NULL)
            //   IS NOT <expr>    -> NOT(x IS expr)
            //   IS NULL / <expr> -> x IS NULL / x IS expr
            lexer.next();
            Token next = lexer.look();
            switch(next.getType()) {
            case Token.NOTNULL:
                lexer.next();
                op1 = codeBuilder.is(op1, codeBuilder.constant(null));
                op1 = codeBuilder.not(op1);
                break;
            case Token.OP_NOT:
                lexer.next();
                next = lexer.look();
                if( next.getType() == Token.NULL ) {
                    lexer.next();
                    op1 = codeBuilder.is(op1, codeBuilder.constant(null));
                } else {
                    op2 = parse_sum();
                    if( op2==null ) {
                        throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_IS_operator(),lexer);
                    }
                    op1 = codeBuilder.is(op1, op2);
                }
                // Common negation for both "IS NOT ..." forms above.
                op1 = codeBuilder.not(op1);
                break;
            case Token.NULL:
                lexer.next();
                op1 = codeBuilder.is(op1, codeBuilder.constant(null));
                break;
            default:
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_IS_operator(),lexer);
                }
                op1 = codeBuilder.is(op1, op2);
            }
        }
        break;
        case Token.ISNULL:
            // Postfix ISNULL keyword, shorthand for "IS NULL".
            lexer.next();
            op1 = codeBuilder.is(op1, codeBuilder.constant(null));
            break;
        case Token.OP_REGEXP:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_REGEXP_operator(),lexer);
            }
            op1 = codeBuilder.regexp(op1, op2);
            break;
        case Token.PRED_LIKE:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_LIKE_operator(),lexer);
            }
            op1 = codeBuilder.like(op1, op2);
            break;
        case Token.PRED_ILIKE:
            lexer.next();
            op2 = parse_sum();
            if( op2==null ) {
                throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_ILIKE_operator(),lexer);
            }
            op1 = codeBuilder.ilike(op1, op2);
            break;
        default:
            // Not a conditional operator: the chain ends here.
            return op1;
        }
    }
}
344 |
|
345 |
public Code parse_sum() {
|
346 |
Code op1 = parse_factor(); |
347 |
Code op2; |
348 |
while( true ) { |
349 |
Token token = lexer.look(); |
350 |
switch( token.getType() ) {
|
351 |
case Token.OP_CONCAT:
|
352 |
lexer.next(); |
353 |
op2 = parse_factor(); |
354 |
op1 = codeBuilder.concat(op1, op2); |
355 |
break;
|
356 |
case Token.OP_ADD:
|
357 |
lexer.next(); |
358 |
op2 = parse_factor(); |
359 |
op1 = codeBuilder.add(op1, op2); |
360 |
break;
|
361 |
case Token.OP_SUBST:
|
362 |
lexer.next(); |
363 |
op2 = parse_factor(); |
364 |
op1 = codeBuilder.subst(op1, op2); |
365 |
break;
|
366 |
default:
|
367 |
return op1;
|
368 |
} |
369 |
} |
370 |
} |
371 |
|
372 |
public Code parse_factor() {
|
373 |
Code op1 = parse_getattr(); |
374 |
Code op2; |
375 |
while( true ) { |
376 |
Token token = lexer.look(); |
377 |
switch( token.getType() ) {
|
378 |
case Token.OP_MULT:
|
379 |
lexer.next(); |
380 |
op2 = parse_getattr(); |
381 |
if( op2==null ) { |
382 |
throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_MULT_operator(),lexer); |
383 |
} |
384 |
op1 = codeBuilder.mult(op1, op2); |
385 |
break;
|
386 |
case Token.OP_DIV:
|
387 |
lexer.next(); |
388 |
op2 = parse_getattr(); |
389 |
if( op2==null ) { |
390 |
throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_DIV_operator(),lexer); |
391 |
} |
392 |
op1 = codeBuilder.div(op1, op2); |
393 |
break;
|
394 |
case Token.OP_MOD:
|
395 |
lexer.next(); |
396 |
op2 = parse_getattr(); |
397 |
if( op2==null ) { |
398 |
throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_MOD_operator(),lexer); |
399 |
} |
400 |
op1 = codeBuilder.mod(op1, op2); |
401 |
break;
|
402 |
case Token.OPEN_BRACKET:
|
403 |
lexer.next(); |
404 |
Code codeIndex = parse_expression(); |
405 |
if( codeIndex == null ) { |
406 |
throw new ExpressionSyntaxException(I18N.unexpected_end_of_source(),lexer); |
407 |
} |
408 |
token = lexer.look(); |
409 |
if( token.getType()!=Token.CLOSED_BRACKET) {
|
410 |
throw new ExpressionSyntaxException(I18N.A_XTokenX_was_expected_and_XliteralX_was_found("]", token.getLiteral()),lexer); |
411 |
} |
412 |
lexer.next(); |
413 |
Code code = codeBuilder.getitem(op1, codeIndex); |
414 |
return code;
|
415 |
default:
|
416 |
return op1;
|
417 |
} |
418 |
} |
419 |
} |
420 |
|
421 |
public Code parse_getattr() {
|
422 |
Code op1 = parse_termino(); |
423 |
if( !isObjectAccessSupported() ) {
|
424 |
return op1;
|
425 |
} |
426 |
while( true ) { |
427 |
Token next = lexer.look(); |
428 |
switch( next.getType() ) {
|
429 |
case Token.OP_GETATTR:
|
430 |
lexer.next(); |
431 |
next = lexer.look(); |
432 |
if( next.getType()!=Token.IDENTIFIER ) {
|
433 |
throw new ExpressionSyntaxException( |
434 |
I18N.An_attribute_identifier_was_expected_and_XliteralX_was_found(next.getLiteral()), |
435 |
lexer |
436 |
); |
437 |
} |
438 |
String id = (String) next.getLiteral(); |
439 |
lexer.next(); |
440 |
next = lexer.look(); |
441 |
if( next.getType() == Token.PARENTHESIS_OPEN ) {
|
442 |
lexer.next(); |
443 |
Codes args = parse_expressions(",");
|
444 |
next = lexer.next(); |
445 |
if( next.getType() != Token.PARENTHESIS_CLOSE ) {
|
446 |
throw new ExpressionSyntaxException( |
447 |
I18N.Closing_parenthesis_was_expected_and_XliteralX_was_found(next.getLiteral()), |
448 |
lexer |
449 |
); |
450 |
} |
451 |
return codeBuilder.method(op1, id, args);
|
452 |
} else {
|
453 |
return codeBuilder.getattr(op1, id);
|
454 |
} |
455 |
default:
|
456 |
return op1;
|
457 |
} |
458 |
} |
459 |
} |
460 |
|
461 |
/**
 * Parses a terminal of the grammar: a parenthesized expression, an
 * identifier (plain, reserved, or a function call), a literal
 * (string/integer/float/NULL/TRUE/FALSE), a unary minus, or a
 * grammar-provided statement. Returns null when the lookahead is a
 * reserved word claimed by a grammar but no statement applies.
 */
public Code parse_termino() {

    Token token = lexer.look();
    switch( token.getType() ) {
    case Token.PARENTHESIS_OPEN: {
        // "( expression )": the inner expression is returned as-is.
        lexer.next();
        Code value = parse_expression();
        Token next = lexer.next();
        switch(next.getType()) {
        case Token.PARENTHESIS_CLOSE:
            break;
        case Token.EOF:
            throw new ExpressionSyntaxException(
                    I18N.Closing_parenthesis_was_expected_and_end_of_source_was_found(),
                    lexer
            );
        default:
            throw new ExpressionSyntaxException(
                    I18N.Closing_parenthesis_was_expected_and_XliteralX_was_found(next.getLiteral()),
                    lexer
            );
        }
        return value;
    }
    case Token.IDENTIFIER: {
        // Grammars get the first chance at an identifier (e.g. statement
        // keywords); note parse_grammars() does not consume the token when
        // no statement applies.
        Code code = parse_grammars();
        if( code!=null ) {
            return code;
        }
        if( this.grammars.isReservedWord(token.getLiteral()) ) {
            // Reserved by some grammar but no statement matched: not a
            // terminal; the caller decides how to proceed.
            return null;
        }
        lexer.next();
        String id = (String) token.getLiteral();
        Token next = lexer.look();
        if( next.getType() == Token.PARENTHESIS_OPEN ) {
            // Function call: "name(arg, arg, ...)".
            lexer.next();
            Codes args = parse_expressions(",");
            next = lexer.next();
            switch(next.getType()) {
            case Token.PARENTHESIS_CLOSE:
                break;
            case Token.EOF:
                throw new ExpressionSyntaxException(
                        I18N.Closing_parenthesis_was_expected_and_end_of_source_was_found(),
                        lexer
                );
            default:
                throw new ExpressionSyntaxException(
                        I18N.Closing_parenthesis_was_expected_and_XliteralX_was_found(next.getLiteral()),
                        lexer
                );
            }
            return codeBuilder.function(id, args);
        } else {
            // Bare identifier (e.g. a variable/field reference).
            return codeBuilder.identifier(id);
        }
    }
    case Token.STRING_LITERAL:
        lexer.next();
        return codeBuilder.constant(token.getValue());
    case Token.INTEGER_LITERAL:
        lexer.next();
        return codeBuilder.constant(token.getValue());
    case Token.FLOATING_POINT_LITERAL:
        lexer.next();
        return codeBuilder.constant(token.getValue());
    case Token.NULL:
        lexer.next();
        return codeBuilder.constant(null);
    case Token.TRUE:
        lexer.next();
        return codeBuilder.constant(true);
    case Token.FALSE:
        lexer.next();
        return codeBuilder.constant(false);
    case Token.OP_SUBST:
        // Unary minus. A numeric constant is folded in place (the constant
        // node is mutated to hold its negated value); any other numeric
        // constant form is rejected, and non-constant operands become an
        // explicit negate node.
        lexer.next();
        Code code = parse_termino();
        if( code.code()==Code.CONSTANT ) {
            BaseConstant c = (BaseConstant)code;
            if( c.value() instanceof Number ) {
                c.value(NegOperator.negate((Number) c.value()));
                return code;
            }
            throw new ExpressionSyntaxException(I18N.A_numeric_constant_was_expected_after_the_unary_operator_minus(),lexer);
        }
        return codeBuilder.negate(code);
    case Token.EOF:
        throw new ExpressionSyntaxException(I18N.unexpected_end_of_source(),lexer);
    default:
        // Last resort: let a grammar try to parse whatever this token is.
        return parse_grammars();
    }
}
555 |
|
556 |
public Codes parse_expressions(String sep) { |
557 |
BaseCodes codes = null;
|
558 |
while( true ) { |
559 |
Code code = parse_expression(); |
560 |
if( code!=null ) { |
561 |
if( codes == null ) { |
562 |
codes = (BaseCodes) codeBuilder.args(); |
563 |
} |
564 |
codes.add(code); |
565 |
} |
566 |
Token next = lexer.look(); |
567 |
String literal = next.getLiteral();
|
568 |
if( literal == null ) { |
569 |
return codes;
|
570 |
} |
571 |
literal = literal.trim(); |
572 |
if( sep.equals(literal) ) {
|
573 |
lexer.next(); // Consume el ",".
|
574 |
} else {
|
575 |
return codes;
|
576 |
} |
577 |
} |
578 |
} |
579 |
|
580 |
private Code parse_grammars() {
|
581 |
StatementContext context = new DefaultStatementContext();
|
582 |
Statement stmt = this.grammars.getApplicableStatement(context); |
583 |
if( stmt!=null ) { |
584 |
Code code1 = stmt.parse(context); |
585 |
return code1;
|
586 |
} |
587 |
return null; |
588 |
} |
589 |
} |