svn-gvsig-desktop / trunk / org.gvsig.desktop / org.gvsig.desktop.compat.cdc / org.gvsig.fmap.dal / org.gvsig.fmap.dal.swing / org.gvsig.fmap.dal.swing.impl / src / main / java / org / gvsig / fmap / dal / swing / impl / searchPostProcess / distinctOn / DistinctOn.java @ 47362
History | View | Annotate | Download (14 KB)
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.gvsig.fmap.dal.swing.impl.searchPostProcess.distinctOn; |
7 |
|
8 |
import java.io.File; |
9 |
import java.util.Collection; |
10 |
import java.util.HashMap; |
11 |
import java.util.Map; |
12 |
import java.util.Objects; |
13 |
import java.util.UUID; |
14 |
import org.apache.commons.lang3.StringUtils; |
15 |
import org.gvsig.fmap.dal.DALLocator; |
16 |
import org.gvsig.fmap.dal.DataManager; |
17 |
import org.gvsig.fmap.dal.DataStoreParameters; |
18 |
import org.gvsig.fmap.dal.StoresRepository; |
19 |
import org.gvsig.fmap.dal.exception.DataException; |
20 |
import org.gvsig.fmap.dal.exception.ValidateDataParametersException; |
21 |
import org.gvsig.fmap.dal.feature.EditableFeature; |
22 |
import org.gvsig.fmap.dal.feature.EditableFeatureAttributeDescriptor; |
23 |
import org.gvsig.fmap.dal.feature.EditableFeatureType; |
24 |
import org.gvsig.fmap.dal.feature.Feature; |
25 |
import org.gvsig.fmap.dal.feature.FeatureAttributeDescriptor; |
26 |
import org.gvsig.fmap.dal.feature.FeatureQuery; |
27 |
import org.gvsig.fmap.dal.feature.FeatureSet; |
28 |
import org.gvsig.fmap.dal.feature.FeatureStore; |
29 |
import org.gvsig.fmap.dal.feature.FeatureType; |
30 |
import org.gvsig.fmap.dal.store.jdbc.JDBCNewStoreParameters; |
31 |
import org.gvsig.fmap.dal.store.jdbc.JDBCServerExplorer; |
32 |
import org.gvsig.fmap.dal.store.jdbc.JDBCServerExplorerParameters; |
33 |
import org.gvsig.fmap.dal.swing.impl.searchPostProcess.distinctOn.AggregateOperation.AggregateOperationFactory; |
34 |
import org.gvsig.fmap.dal.swing.searchPostProcess.AbstractSearchPostProcess; |
35 |
import org.gvsig.fmap.dal.swing.searchPostProcess.SearchPostProcessFactory; |
36 |
import org.gvsig.timesupport.DataTypes; |
37 |
import org.gvsig.tools.ToolsLocator; |
38 |
import org.gvsig.tools.dataTypes.DataType; |
39 |
import org.gvsig.tools.dispose.DisposeUtils; |
40 |
import org.gvsig.tools.dynobject.DynObject; |
41 |
import org.gvsig.tools.folders.FoldersManager; |
42 |
import org.gvsig.tools.i18n.I18nManager; |
43 |
import org.gvsig.tools.task.SimpleTaskStatus; |
44 |
import org.gvsig.tools.util.HasAFile; |
45 |
import org.slf4j.LoggerFactory; |
46 |
|
47 |
/**
 * Search post-process that implements a "distinct on" operation over a
 * feature store, writing one aggregated row per group to a temporary table.
 *
 * @author jovivas
 */
public class DistinctOn extends AbstractSearchPostProcess { |
52 |
|
53 |
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(DistinctOnFactory.class); |
54 |
|
55 |
private static final String DISTINCT_ON_PRIMARY_KEY = "PK$$"; |
56 |
|
57 |
public DistinctOn(
|
58 |
SearchPostProcessFactory factory, |
59 |
FeatureStore input, |
60 |
FeatureQuery query, |
61 |
DynObject parameters |
62 |
) { |
63 |
super(factory, input, query, parameters);
|
64 |
|
65 |
} |
66 |
|
67 |
@Override
|
68 |
public SearchPostProcessResult execute(FeatureStore inputStore, FeatureQuery query, DynObject parameters, SimpleTaskStatus status) {
|
69 |
String uniqueFieldName;
|
70 |
|
71 |
uniqueFieldName = (String) parameters.getDynValue("field"); |
72 |
|
73 |
I18nManager i18n = ToolsLocator.getI18nManager(); |
74 |
if (status == null) { |
75 |
status = ToolsLocator.getTaskStatusManager().createDefaultSimpleTaskStatus(i18n.getTranslation("_Executing_post_process"));
|
76 |
status.setAutoremove(true);
|
77 |
status.add(); |
78 |
} else {
|
79 |
status.push(); |
80 |
status.setTitle(i18n.getTranslation("_Executing_post_process"));
|
81 |
} |
82 |
status.message(i18n.getTranslation("_Creating_data_base"));
|
83 |
|
84 |
try {
|
85 |
// crear ruta de archivo temporal
|
86 |
FoldersManager foldersManager = ToolsLocator.getFoldersManager(); |
87 |
File tempFile = foldersManager.getUniqueTemporaryFile("distinctOn_process_" + UUID.randomUUID().toString()); |
88 |
|
89 |
// crear SERVER STORE
|
90 |
DataManager dataManager = DALLocator.getDataManager(); |
91 |
JDBCServerExplorerParameters serverParameters = (JDBCServerExplorerParameters) dataManager.createServerExplorerParameters("H2Spatial");
|
92 |
((HasAFile) serverParameters).setFile(tempFile); |
93 |
JDBCServerExplorer serverExplorer = (JDBCServerExplorer) dataManager.openServerExplorer("H2Spatial", serverParameters);
|
94 |
|
95 |
//Crear tablas en server store
|
96 |
JDBCNewStoreParameters parametersResults = serverExplorer.getAddParameters(); |
97 |
parametersResults.setDynValue("Table", "results"); |
98 |
FeatureType ft_inputStore = inputStore.getDefaultFeatureType(); |
99 |
EditableFeatureType ft_outputStoreNoContainer = dataManager.createFeatureType(); |
100 |
ft_outputStoreNoContainer.add(DISTINCT_ON_PRIMARY_KEY, DataTypes.INTEGER).setIsPrimaryKey(true).setIsAutomatic(false).setLabel("pk"); |
101 |
|
102 |
ft_outputStoreNoContainer.addAll(ft_inputStore); |
103 |
for (FeatureAttributeDescriptor attr : ft_inputStore) {
|
104 |
DataType dataType = attr.getDataType(); |
105 |
EditableFeatureAttributeDescriptor eattr = ft_outputStoreNoContainer.getEditableAttributeDescriptor(attr.getName()); |
106 |
if (dataType.isContainer()) {
|
107 |
ft_outputStoreNoContainer.remove(attr.getName()); |
108 |
} |
109 |
} |
110 |
Map<String, EditableFeatureAttributeDescriptor> allExtraColumns = new HashMap<>(); |
111 |
for (EditableFeatureAttributeDescriptor column : ft_inputStore.getExtraColumns().getColumns()) {
|
112 |
allExtraColumns.put(column.getName(), column); |
113 |
} |
114 |
for (EditableFeatureAttributeDescriptor column : query.getExtraColumn().getColumns()) {
|
115 |
allExtraColumns.put(column.getName(), column); |
116 |
} |
117 |
if (allExtraColumns.size() > 0) { |
118 |
for (EditableFeatureAttributeDescriptor column : allExtraColumns.values()) {
|
119 |
DataType dataType = column.getDataType(); |
120 |
if (!dataType.isContainer()) {
|
121 |
EditableFeatureAttributeDescriptor eattr = ft_outputStoreNoContainer.add(column.getName(), column.getType(), column.getSize()); |
122 |
//Copiar column a attr y quitar la f?rmula
|
123 |
eattr.copyFrom(column); |
124 |
} |
125 |
} |
126 |
} |
127 |
|
128 |
// Creaci?n del Map con las funciones de agregado
|
129 |
Map<String, AggregateOperation> aggregates = new HashMap<>(); |
130 |
for (FeatureAttributeDescriptor attr : ft_outputStoreNoContainer) {
|
131 |
if(StringUtils.equalsIgnoreCase(attr.getName(), DISTINCT_ON_PRIMARY_KEY)){
|
132 |
continue;
|
133 |
} |
134 |
if (parameters.hasDynValue(attr.getName())) {
|
135 |
String operationName = (String) parameters.getDynValue(attr.getName()); |
136 |
AggregateOperation operation = (AggregateOperation) aggregatesOperationFactories.get(operationName).create(); |
137 |
operation.fixAttributeDescriptor((EditableFeatureAttributeDescriptor) attr); |
138 |
aggregates.put( |
139 |
attr.getName(), |
140 |
operation |
141 |
); |
142 |
} |
143 |
} |
144 |
|
145 |
parametersResults.setDefaultFeatureType(ft_outputStoreNoContainer); |
146 |
|
147 |
serverExplorer.add("H2Spatial", parametersResults, true); |
148 |
|
149 |
DataStoreParameters storeParametersResults = dataManager.createStoreParameters("H2Spatial");
|
150 |
storeParametersResults.setDynValue("database_file", tempFile);
|
151 |
storeParametersResults.setDynValue("Table", "results"); |
152 |
|
153 |
//Creaci?n del store con los resultados
|
154 |
FeatureStore storeResults = (FeatureStore) dataManager.openStore("H2Spatial", storeParametersResults);
|
155 |
|
156 |
|
157 |
// distictOn
|
158 |
storeResults.edit(FeatureStore.MODE_APPEND); |
159 |
|
160 |
FeatureSet features; |
161 |
|
162 |
status.message(i18n.getTranslation("_Getting_features_from_query"));
|
163 |
|
164 |
if (query != null) { |
165 |
features = inputStore.getFeatureSet(query); |
166 |
} else {
|
167 |
features = inputStore.getFeatureSet(); |
168 |
} |
169 |
EditableFeature aggregateFeature = null;
|
170 |
|
171 |
status.message(i18n.getTranslation("_Processing_features"));
|
172 |
status.setRangeOfValues(0, features.size());
|
173 |
|
174 |
long counter = 1; |
175 |
for (Feature feature : features) {
|
176 |
if (status.isCancellationRequested()) {
|
177 |
DisposeUtils.disposeQuietly(features); |
178 |
if (storeResults.isEditing()) {
|
179 |
storeResults.cancelEditingQuietly(); |
180 |
} |
181 |
|
182 |
status.setTitle(i18n.getTranslation("_Post_process_canceled"));
|
183 |
status.message("");
|
184 |
status.cancel(); |
185 |
return null; |
186 |
} |
187 |
if (aggregateFeature == null) { |
188 |
|
189 |
aggregateFeature = storeResults.createNewFeature(feature); |
190 |
} |
191 |
|
192 |
if (!Objects.equals(feature.get(uniqueFieldName), aggregateFeature.get(uniqueFieldName))) {
|
193 |
//Recoge lo agregado y lo inserta
|
194 |
for (Map.Entry<String, AggregateOperation> entry : aggregates.entrySet()) { |
195 |
String name = entry.getKey();
|
196 |
AggregateOperation operation = entry.getValue(); |
197 |
aggregateFeature.set(name, operation.getValue()); |
198 |
} |
199 |
aggregateFeature.set(DISTINCT_ON_PRIMARY_KEY, counter++); |
200 |
storeResults.insert(aggregateFeature); |
201 |
//Resetear las operaciones de agregado y la feature agregada
|
202 |
for (Map.Entry<String, AggregateOperation> entry : aggregates.entrySet()) { |
203 |
AggregateOperation operation = entry.getValue(); |
204 |
operation.reset(); |
205 |
} |
206 |
aggregateFeature = storeResults.createNewFeature(feature); |
207 |
} |
208 |
//Evalua las funciones de agregado de la feature corriente
|
209 |
for (Map.Entry<String, AggregateOperation> entry : aggregates.entrySet()) { |
210 |
String name = entry.getKey();
|
211 |
AggregateOperation operation = entry.getValue(); |
212 |
Object value = feature.get(name);
|
213 |
if (operation.isApplicable(value)) {
|
214 |
operation.perform(value); |
215 |
} |
216 |
} |
217 |
|
218 |
//
|
219 |
//
|
220 |
//
|
221 |
//// if (aggregateFeature != null) {
|
222 |
// if (Objects.equals(feature.get(uniqueFieldName), aggregateFeature.get(uniqueFieldName))) {
|
223 |
// for (Map.Entry<String, AggregateOperation> entry : aggregates.entrySet()) {
|
224 |
// String name = entry.getKey();
|
225 |
// AggregateOperation operation = entry.getValue();
|
226 |
// Object value = feature.get(name);
|
227 |
// if (operation.isApplicable(value)) {
|
228 |
// operation.perform(value);
|
229 |
// }
|
230 |
// }
|
231 |
// } else {
|
232 |
// for (Map.Entry<String, AggregateOperation> entry : aggregates.entrySet()) {
|
233 |
// String name = entry.getKey();
|
234 |
// AggregateOperation operation = entry.getValue();
|
235 |
// aggregateFeature.set(name, operation.getValue());
|
236 |
// }
|
237 |
// aggregateFeature.set(DISTINCT_ON_PRIMARY_KEY, counter++);
|
238 |
// storeResults.insert(aggregateFeature);
|
239 |
// aggregateFeature = null;
|
240 |
// }
|
241 |
//// }
|
242 |
//
|
243 |
//// if (aggregateFeature == null) {
|
244 |
//// aggregateFeature = storeResults.createNewFeature(feature);
|
245 |
//// for (Map.Entry<String, AggregateOperation> entry : aggregates.entrySet()) {
|
246 |
//// String name = entry.getKey();
|
247 |
//// AggregateOperation operation = entry.getValue();
|
248 |
//// operation.reset();
|
249 |
//// Object value = feature.get(name);
|
250 |
//// if (operation.isApplicable(value)) {
|
251 |
//// operation.perform(value);
|
252 |
//// }
|
253 |
//// aggregateFeature.set(name, operation.getValue());
|
254 |
//// }
|
255 |
//// }
|
256 |
status.incrementCurrentValue(); |
257 |
} |
258 |
if (aggregateFeature != null) { |
259 |
//Recoge lo agregado y lo inserta
|
260 |
for (Map.Entry<String, AggregateOperation> entry : aggregates.entrySet()) { |
261 |
String name = entry.getKey();
|
262 |
AggregateOperation operation = entry.getValue(); |
263 |
aggregateFeature.set(name, operation.getValue()); |
264 |
} |
265 |
aggregateFeature.set(DISTINCT_ON_PRIMARY_KEY, counter++); |
266 |
storeResults.insert(aggregateFeature); |
267 |
} |
268 |
|
269 |
storeResults.finishEditing(); |
270 |
storeResults.setTemporary(true);
|
271 |
Boolean addTableToProject = (Boolean) parameters.getDynValue("addTableToProject"); |
272 |
if(addTableToProject){
|
273 |
StoresRepository repository = dataManager.getStoresRepository().getSubrepository("PROJECT_TABLES");
|
274 |
repository.add(this.getName(), storeResults);
|
275 |
} |
276 |
SearchPostProcessResult searchPostProcessResult = new DefaultSearchPostProcessResult(storeResults, null); |
277 |
status.setTitle(i18n.getTranslation("_Post_process_ended"));
|
278 |
status.message("");
|
279 |
return searchPostProcessResult;
|
280 |
|
281 |
} catch (DataException | ValidateDataParametersException ex) {
|
282 |
LOGGER.warn("Can't execute distinct on search post process.", ex);
|
283 |
} finally {
|
284 |
status.pop(); |
285 |
} |
286 |
|
287 |
return null; |
288 |
} |
289 |
|
290 |
private static Map<String, AggregateOperationFactory> aggregatesOperationFactories = new HashMap<>(); |
291 |
|
292 |
public static void registerAggregateOperation(AggregateOperationFactory aggregateOperationFactory) { |
293 |
aggregatesOperationFactories.put(aggregateOperationFactory.getName(), aggregateOperationFactory); |
294 |
} |
295 |
|
296 |
public static Collection<AggregateOperationFactory> getAggregatesOperationFactories() { |
297 |
return aggregatesOperationFactories.values();
|
298 |
} |
299 |
|
300 |
public static AggregateOperationFactory getAggregatesOperationFactory(String name) { |
301 |
return aggregatesOperationFactories.get(name);
|
302 |
} |
303 |
|
304 |
} |