Revision 10627 trunk/libraries/libFMap/src/com/iver/cit/gvsig/fmap/operations/arcview/ArcJoin.java

View differences:

ArcJoin.java
2 2

  
3 3
import java.util.HashMap;
4 4

  
5
import com.hardcode.gdbms.driver.exceptions.ReadDriverException;
5 6
import com.hardcode.gdbms.engine.customQuery.CustomQuery;
6 7
import com.hardcode.gdbms.engine.customQuery.QueryException;
7 8
import com.hardcode.gdbms.engine.data.DataSource;
8
import com.hardcode.gdbms.engine.data.driver.DriverException;
9 9
import com.hardcode.gdbms.engine.instruction.Adapter;
10 10
import com.hardcode.gdbms.engine.instruction.Expression;
11 11
import com.hardcode.gdbms.engine.instruction.IncompatibleTypesException;
......
25 25
	public OperationDataSource evaluate(DataSource[] tables, Expression[] values) throws QueryException {
26 26
		if (tables.length != 2) throw new QueryException("ArcJoin sólo opera con 2 tablas");
27 27
		if (values.length != 2) throw new QueryException("Se esperan dos expresiones de campo");
28
		
28

  
29 29
		//Se establece el origen de datos para las expresiones
30 30
		((Adapter) values[0]).getInstructionContext().setDs(tables[0]);
31 31
		((Adapter) values[0]).getInstructionContext().setFromTable(tables[0]);
32
		
32

  
33 33
		String fieldName0 = values[0].getFieldName();
34 34
		if  (fieldName0 == null) throw new QueryException("El valor debe ser una referencia a columna:" + Utilities.getText(((Adapter)values[0]).getEntity()));
35 35
		String fieldName1 = values[1].getFieldName();
......
38 38
		try {
39 39
			tables[0].start();
40 40
			tables[1].start();
41
			
41

  
42 42
			int[] result = new int[(int) tables[0].getRowCount()];
43
			
43

  
44 44
			int index0 = tables[0].getFieldIndexByName(fieldName0);
45 45
			if (index0 == -1)  throw new QueryException("No existe el campo: " + fieldName0);
46 46
			int index1 = tables[1].getFieldIndexByName(fieldName1);
47 47
			if (index1 == -1)  throw new QueryException("No existe el campo: " + fieldName1);
48
			
48

  
49 49
			//Construimos el índice
50 50
			HashMap idx = new HashMap(((int) tables[1].getRowCount())*2);
51 51
			for (int i = 0; i < tables[1].getRowCount(); i++) {
......
53 53
				if (idx.get(v) == null)
54 54
				idx.put(v, new Integer(i));
55 55
			}
56
			
56

  
57 57
/*			Index idx = new DiskIndex(((int) tables[1].getRowCount())*2);
58 58
			idx.start();
59 59
			for (int i = 0; i < tables[1].getRowCount(); i++) {
60 60
				idx.add(tables[1].getFieldValue(i, index1), i);
61 61
			}
62
*/			
62
*/
63 63
			//Hacemos la query
64 64
			for (int i = 0; i < tables[0].getRowCount(); i++) {
65 65
				Value v = tables[0].getFieldValue(i, index0);
......
76 76
					}
77 77
				}
78 78
			}
79
/*			
79
/*
80 80
			for (int i = 0; i < tables[0].getRowCount(); i++) {
81 81
				Value v = tables[0].getFieldValue(i, index0);
82 82
				PositionIterator pi = idx.getPositions(v);
......
92 92
						throw new QueryException("Los tipos de datos son incompatibles: " + tables[0].getFieldType(index0) + " - " + tables[1].getFieldType(index1), e1);
93 93
					}
94 94
				}
95
				if (!any) result[i] = -1; 
95
				if (!any) result[i] = -1;
96 96
			}
97 97
*/
98 98
			tables[0].stop();
99 99
			tables[1].stop();
100 100
//			idx.stop();
101
			
101

  
102 102
			return new ArcJoinDataSource(result, tables[0], tables[1], index1);
103
		} catch (DriverException e) {
103
		} catch (ReadDriverException e) {
104 104
			throw new QueryException("Error accediendo a los datos", e);
105 105
		}
106 106
	}

Also available in: Unified diff