// DbaseFileHeader.java (r44844), gvSIG desktop trunk, org.gvsig.fmap.dal.file.dbf
/**
 * gvSIG. Desktop Geographic Information System.
 *
 * Copyright (C) 2007-2013 gvSIG Association.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 3
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 *
 * For any additional information, do not hesitate to contact us
 * at info AT gvsig.com, or visit our website www.gvsig.com.
 */
package org.gvsig.fmap.dal.store.dbf.utils;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;

import org.gvsig.fmap.dal.DataTypes;
import org.gvsig.fmap.dal.exception.DataException;
import org.gvsig.fmap.dal.exception.UnsupportedVersionException;
import org.gvsig.fmap.dal.feature.EditableFeatureAttributeDescriptor;
import org.gvsig.fmap.dal.feature.EditableFeatureType;
import org.gvsig.fmap.dal.feature.FeatureAttributeDescriptor;
import org.gvsig.fmap.dal.feature.FeatureType;
import org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException;
import org.gvsig.fmap.dal.feature.exception.UnknownDataTypeException;
import org.gvsig.fmap.dal.store.dbf.DBFStoreProvider;
import org.gvsig.fmap.dal.store.dbf.DuplicatedFieldNameException;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_BOOLEAN;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_DATE;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_FLOAT;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_NUMBER;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_STRING;
import org.gvsig.tools.ToolsLocator;
import org.gvsig.tools.dataTypes.DataType;
import org.gvsig.utils.bigfile.BigByteBuffer2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIMESTAMP_SIZE;
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIME_SIZE;

/**
 * Class to represent the header of a dBase III file.
 */
@SuppressWarnings("UseSpecificCatch")
public class DbaseFileHeader {

    private static final Logger LOGGER = LoggerFactory.getLogger(DbaseFileHeader.class);

    // Size in bytes of each field descriptor in the header.
    private final int FILE_DESCRIPTOR_SIZE = 32;

    // Type of the file, must be 03h.
    private static final byte MAGIC = 0x03;

    private static final int MINIMUM_HEADER = 33;

    // Type of the file, must be 03h.
    private int myFileType = 0x03;

    // Date the file was last updated.
    private Date myUpdateDate = new Date();

    // Number of records in the datafile.
    private int myNumRecords = 0;

    // Length of the header structure.
    private int myHeaderLength;

    /**
     * Length of the records. Set to 1 as the default value: even if no
     * column is defined, the initial deleted-status byte of each record is
     * taken into account.
     */
    private int myRecordLength = 1;

    // Number of fields in the record.
    private int myNumFields;

    // Collection of header records.
    private DbaseFieldDescriptor[] myFieldDescriptions;

    private int myLanguageID = 0x00;

    /**
     * Only considered when myLanguageID == 0x00.
     */
    private String charset = null;

    private List<String> encodingSupportedByString = null;

    private int origLanguageID = 0x00;

    /**
     * Headers must always be encoded using ASCII/ISO-8859-1, regardless of
     * the encoding of the records.
     */
    private static final Charset HEADER_CHARSET = Charset.forName("ISO-8859-1");

    /**
     * DbaseFileHeader constructor.
     */
    public DbaseFileHeader() {
        super();

        encodingSupportedByString = new ArrayList<>();
        SortedMap<String, Charset> m = Charset.availableCharsets();
        Set<String> k = m.keySet();
        Iterator<String> it = k.iterator();
        while (it.hasNext()) {
            encodingSupportedByString.add(it.next());
        }
    }
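
    // Usage sketch (illustrative only, not part of the original source;
    // column names and sizes are hypothetical):
    //
    //   DbaseFileHeader header = new DbaseFileHeader();
    //   header.addColumn("NAME", 'C', 32, 0, 0);  // 32-byte character field
    //   header.addColumn("AREA", 'N', 12, 12, 3); // numeric, 3 decimals
    //   // record length: 1 (deleted flag) + 32 + 12 = 45 bytes
    //   // header length: 33 + 32 * 2 = 97 bytes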

    /**
     * Add a column to this DbaseFileHeader. The type is one of (C N L or D):
     * character, number, logical (true/false), or date. The field length is
     * the total length in bytes reserved for this column. The decimal count
     * only applies to numbers (N) and floating point values (F), and refers
     * to the number of characters to reserve after the decimal point.
     *
     * @param fieldName name of the new column
     * @param fieldType type of the new column (C, N, L, D or F)
     * @param fieldSize total length in bytes reserved for the column
     * @param fieldPrecision precision of the column, for numeric types
     * @param fieldScale number of digits after the decimal point
     * @throws
     * org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException
     */
    public void addColumn(String fieldName, char fieldType,
            int fieldSize, int fieldPrecision, int fieldScale)
            throws AttributeFeatureTypeNotSuportedException {
//        if (fieldSize <= 0) {
//            fieldSize = 1;
//        }

        if (myFieldDescriptions == null) {
            myFieldDescriptions = new DbaseFieldDescriptor[0];
        }

        // The running length is used for the offsets; the first byte of each
        // record is reserved for the deleted-status marker ('*').
        int tempLength = 1;
        DbaseFieldDescriptor[] tempFieldDescriptors =
                new DbaseFieldDescriptor[myFieldDescriptions.length + 1];

        for (int i = 0; i < myFieldDescriptions.length; i++) {
            myFieldDescriptions[i].setOffsetInRecord(tempLength);
            tempLength = tempLength + myFieldDescriptions[i].getSize();
            tempFieldDescriptors[i] = myFieldDescriptions[i];
        }

        tempFieldDescriptors[myFieldDescriptions.length] = new DbaseFieldDescriptor();
        tempFieldDescriptors[myFieldDescriptions.length].setType(fieldType);
        tempFieldDescriptors[myFieldDescriptions.length].setSize(fieldSize);
        tempFieldDescriptors[myFieldDescriptions.length].setScale(fieldScale);
        tempFieldDescriptors[myFieldDescriptions.length].setOffsetInRecord(tempLength);

        // set the field name
        String tempFieldName = fieldName;

        if (tempFieldName == null) {
            tempFieldName = "NoName";
        }

        if (tempFieldName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
            tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH);
            warn("FieldName " + fieldName
                    + " is longer than " + DbaseFile.MAX_FIELD_NAME_LENGTH
                    + " characters, truncating to " + tempFieldName);
        }

        tempFieldDescriptors[myFieldDescriptions.length].setName(tempFieldName);
        tempFieldDescriptors[myFieldDescriptions.length].setName_trim(tempFieldName.trim());

        // the length of a record
        tempLength += tempFieldDescriptors[myFieldDescriptions.length].getSize();

        // set the new fields.
        myFieldDescriptions = tempFieldDescriptors;
        myHeaderLength = 33 + (32 * myFieldDescriptions.length);
        myNumFields = myFieldDescriptions.length;
        myRecordLength = tempLength;
    }

    /**
     * Remove a column from this DbaseFileHeader.
     *
     * @param inFieldName name of the column to remove
     *
     * @return index of the removed column, -1 if not found
     */
    public int removeColumn(String inFieldName) {
        int retCol = -1;
        int tempLength = 1;
        DbaseFieldDescriptor[] tempFieldDescriptors =
                new DbaseFieldDescriptor[myFieldDescriptions.length - 1];

        for (int i = 0, j = 0; i < myFieldDescriptions.length; i++) {
            if (!inFieldName.equalsIgnoreCase(
                    myFieldDescriptions[i].getName().trim())) {
                // if this is the last field and we still haven't found the
                // named field
                if ((i == j) && (i == (myFieldDescriptions.length - 1))) {
                    return retCol;
                }

                tempFieldDescriptors[j] = myFieldDescriptions[i];
                tempFieldDescriptors[j].setOffsetInRecord(tempLength);
                tempLength += tempFieldDescriptors[j].getSize();

                // only increment j on non-matching fields
                j++;
            } else {
                retCol = i;
            }
        }

        // set the new fields.
        myFieldDescriptions = tempFieldDescriptors;
        myHeaderLength = 33 + (32 * myFieldDescriptions.length);
        myNumFields = myFieldDescriptions.length;
        myRecordLength = tempLength;

        return retCol;
    }

    private void warn(String inWarn) {
        LOGGER.warn(inWarn);
    }

    /**
     * Return the field descriptor for the given field.
     *
     * @param index the index of the requested field description
     * @return the dbase field descriptor.
     */
    public DbaseFieldDescriptor getFieldDescription(int index) {
        return myFieldDescriptions[index];
    }

    public DbaseFieldDescriptor getFieldDescription(String name) {
        int index = this.getFieldIndex(name);
        return myFieldDescriptions[index];
    }

    public int getFieldIndex(String name) {
        for (int i = 0; i < myFieldDescriptions.length; i++) {
            if (myFieldDescriptions[i].getName_trim()
                    .equalsIgnoreCase(name)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Return the date this file was last updated.
     *
     * @return the last update date
     */
    public Date getLastUpdateDate() {
        return myUpdateDate;
    }

    /**
     * Return the number of fields in the records.
     *
     * @return the number of fields
     */
    public int getNumFields() {
        return myNumFields;
    }

    /**
     * Return the number of records in the file.
     *
     * @return the number of records
     */
    public int getNumRecords() {
        return myNumRecords;
    }

    /**
     * Return the length of the records in bytes.
     *
     * @return the record length in bytes
     */
    public int getRecordLength() {
        return myRecordLength;
    }

    /**
     * Return the length of the header.
     *
     * @return the header length in bytes
     */
    public int getHeaderLength() {
        return myHeaderLength;
    }

    /**
     * Read the header data from the DBF file.
     *
     * @param in the buffer to read from
     * @param charsName name of the charset to use, or null to use the one
     *        declared in the header
     * @param allowDuplicatedFieldNames if true, duplicated field names are
     *        renamed instead of rejected
     * @throws UnsupportedVersionException
     * @throws AttributeFeatureTypeNotSuportedException
     */
    public void read(BigByteBuffer2 in, String charsName, boolean allowDuplicatedFieldNames)
            throws UnsupportedVersionException,
            AttributeFeatureTypeNotSuportedException {
        // type of file.
        myFileType = in.get();

        if (myFileType != 0x03) {
            throw new UnsupportedVersionException("DBF", Integer.toHexString(myFileType));
        }

        // parse the update date information.
        int tempUpdateYear = in.get();
        int tempUpdateMonth = in.get();
        int tempUpdateDay = in.get();
        tempUpdateYear = tempUpdateYear + 1900;

        Calendar c = Calendar.getInstance();
        c.set(Calendar.YEAR, tempUpdateYear);
        c.set(Calendar.MONTH, tempUpdateMonth - 1);
        c.set(Calendar.DATE, tempUpdateDay);
        myUpdateDate = c.getTime();

        // read the number of records.
        in.order(ByteOrder.LITTLE_ENDIAN);
        myNumRecords = in.getInt();

        // read the length of the header structure.
        myHeaderLength = in.getShort();

        // read the length of a record
        myRecordLength = in.getShort();

        in.order(ByteOrder.BIG_ENDIAN);

        // read the language byte (LDID)
        in.position(29);
        origLanguageID = byteAsUnsigned(in.get());
        if (charsName != null) {
            // ignore the language byte, use the provided charset name
            myLanguageID = DbaseCodepage.getLdid(charsName);
            this.charset = charsName;
        } else {
            // use the language byte read from the header
            myLanguageID = origLanguageID;
            charsName = getCharsetName();
        }

        // Position the buffer to start reading the field descriptors.
        in.position(32);

        // calculate the number of Fields in the header
        myNumFields = (myHeaderLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;

        // read all of the header records
        myFieldDescriptions = new DbaseFieldDescriptor[myNumFields];
        int fieldOffset = 0;

        List<String> fieldNames = new ArrayList<>();

        // FIXME: should field names be always read using ISO8859-1??
        for (int i = 0; i < myNumFields; i++) {
            myFieldDescriptions[i] = new DbaseFieldDescriptor();

            // read the field name
            byte[] buffer = new byte[11];
            in.get(buffer);
            String fieldName;
            fieldName = new String(buffer, HEADER_CHARSET);

            if (allowDuplicatedFieldNames) {
                fieldName = getUniqueFieldName(fieldName, fieldNames);
            }
            fieldNames.add(fieldName);

            myFieldDescriptions[i].setName(fieldName);

            myFieldDescriptions[i].setName_trim(myFieldDescriptions[i].getName().trim());

            // read the field type
            myFieldDescriptions[i].setType((char) in.get());

            // read the field data address, offset from the start of the record.
            myFieldDescriptions[i].setOffsetInRecord(in.getInt());

            // read the field length in bytes
            int tempLength = in.get();

            if (tempLength < 0) {
                tempLength = tempLength + 256;
            }

            myFieldDescriptions[i].setSize(tempLength);

            // read the field decimal count in bytes
            myFieldDescriptions[i].setScale(in.get());

            // Overwrite the offset read from the file with the one computed
            // from the accumulated field sizes.
            myFieldDescriptions[i].setOffsetInRecord(fieldOffset);

            fieldOffset += tempLength;

            // skip the reserved bytes.
            in.position(in.position() + 14);

        }

        // Last byte is a marker for the end of the field definitions.
        in.get();
    }
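
    // For reference, the fixed part of the header as this class reads and
    // writes it (byte offsets; multi-byte values are little-endian):
    //
    //    0      file type, must be 0x03
    //    1-3    date of last update (year - 1900, month, day)
    //    4-7    number of records (int)
    //    8-9    header length (short)
    //   10-11   record length (short)
    //   12-28   reserved
    //   29      language driver id (LDID)
    //   30-31   reserved
    //   32...   one 32-byte descriptor per field
    //   last    0x0D end-of-descriptors marker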

    /**
     * Set the number of records in the file.
     *
     * @param inNumRecords
     */
    public void setNumRecords(int inNumRecords) {
        myNumRecords = inNumRecords;
    }

    /**
     * Returns the value of the unsigned byte as an int. Bytes are always
     * signed in Java, so if we are reading a C unsigned byte with value
     * greater than 128, it will appear as a negative value.
     *
     * In this case, we need to get the original unsigned value and return it
     * as short or int, as byte will never correctly store the value in Java.
     *
     * @return the unsigned value of the byte, in the range 0-255
     */
    private int byteAsUnsigned(byte b) {
        int i;
        if (b < 0) {
            i = b & 0xFF;
        } else {
            i = b;
        }
        return i;
    }

    /**
     * Gets the language driver ID (code page) defined in the file header
     * (or guessed from the provided charset).
     *
     * Some examples:
     *   01h  DOS USA                     code page 437
     *   02h  DOS Multilingual            code page 850
     *   03h  Windows ANSI                code page 1252
     *   04h  Standard Macintosh
     *   64h  EE MS-DOS                   code page 852
     *   65h  Nordic MS-DOS               code page 865
     *   66h  Russian MS-DOS              code page 866
     *   67h  Icelandic MS-DOS
     *   68h  Kamenicky (Czech) MS-DOS
     *   69h  Mazovia (Polish) MS-DOS
     *   6Ah  Greek MS-DOS (437G)
     *   6Bh  Turkish MS-DOS
     *   96h  Russian Macintosh
     *   97h  Eastern European Macintosh
     *   98h  Greek Macintosh
     *   C8h  Windows EE                  code page 1250
     *   C9h  Russian Windows
     *   CAh  Turkish Windows
     *   CBh  Greek Windows
     *
     * See the java equivalences in
     * {@link DbaseCodepage#dbfLdid} & {@link DbaseCodepage#ldidJava} objects.
     *
     * See some others here:
     * https://github.com/infused/dbf/blob/master/docs/supported_encodings.csv
     *
     * @return the language driver ID
     */
    public int getLanguageID() {

        return myLanguageID;
    }

    /**
     * Write the header data to the DBF file.
     *
     * @param out the FileChannel to write to; the header is written from
     * position 0 of the channel.
     *
     * @throws IOException If errors occur.
     */
    public void write(FileChannel out) throws IOException {
        // take care of the annoying case where no records have been added...
        if (myHeaderLength <= 0) {
            myHeaderLength = MINIMUM_HEADER;
        }

        // Start writing from the beginning of the file.
        out.position(0);

        ByteBuffer buffer = ByteBuffer.allocateDirect(myHeaderLength);
        buffer.order(ByteOrder.LITTLE_ENDIAN);

        // write the output file type.
        buffer.put(MAGIC);

        // write the date stuff
        Calendar c = Calendar.getInstance();
        c.setTime(new Date());
        buffer.put((byte) (c.get(Calendar.YEAR) % 100));
        buffer.put((byte) (c.get(Calendar.MONTH) + 1));
        buffer.put((byte) (c.get(Calendar.DAY_OF_MONTH)));

        // write the number of records in the datafile.
        buffer.putInt(myNumRecords);

        // write the length of the header structure.
        buffer.putShort((short) myHeaderLength);

        // write the length of a record
        buffer.putShort((short) myRecordLength);

        // skip the reserved bytes in the header
        ((Buffer) buffer).position(((Buffer) buffer).position() + 17);

        // write the language id
        buffer.put((byte) getLanguageID());

        // skip the reserved bytes in the header
        ((Buffer) buffer).position(((Buffer) buffer).position() + 2);

        // write all of the header records
        int tempOffset = 0;

        if (myFieldDescriptions != null) {
            for (int i = 0; i < myFieldDescriptions.length; i++) {
                // write the field name, zero-padded
                for (int j = 0; j < DbaseFile.MAX_FIELD_NAME_LENGTH + 1; j++) {
                    if (myFieldDescriptions[i].getName().length() > j) {
                        buffer.put((byte) myFieldDescriptions[i].getName().charAt(j));
                    } else {
                        buffer.put((byte) 0);
                    }
                }

                // write the field type
                buffer.put((byte) myFieldDescriptions[i].getType());

                // write the field data address, offset from the start of the record.
                buffer.putInt(tempOffset);
                tempOffset += myFieldDescriptions[i].getSize();

                // write the length of the field.
                buffer.put((byte) myFieldDescriptions[i].getSize());

                // write the decimal count.
                buffer.put((byte) myFieldDescriptions[i].getScale());

                // skip the reserved bytes.
                ((Buffer) buffer).position(((Buffer) buffer).position() + 14);
            }
        }
        // write the end of the field definitions marker
        buffer.put((byte) 0x0D);

        ((Buffer) buffer).position(0);

        int r = buffer.remaining();

        while ((r -= out.write(buffer)) > 0) {
            // keep writing until the whole buffer has been flushed
        }
    }
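
    // Writing sketch (illustrative only; the path is hypothetical). A
    // FileChannel is required because write() positions the channel at 0:
    //
    //   try (RandomAccessFile raf = new RandomAccessFile("/tmp/out.dbf", "rw")) {
    //       header.write(raf.getChannel());
    //   }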

    public String getCharsetName() {
        return getCharsetName(getLanguageID());
    }

    public String getCharsetName(int ldid) {
        if (ldid != 0) {
            charset = DbaseCodepage.getCharsetName(ldid);
        }
        if (charset != null) {
            return charset;
        }
        // default
        return "ISO-8859-1";
    }

    public String getOriginalCharset() {
        return getCharsetName(this.origLanguageID);
    }

    public String mappingEncoding(String dbfEncoding) {
        if (encodingSupportedByString.contains(dbfEncoding)) {
            return dbfEncoding;
        } else {
            return "UTF-8";
        }
    }

    private String getUniqueFieldName(String fieldName, List<String> fieldNames) {

        int index = 0;
        String tempFieldName = fieldName;
        while (fieldNames.contains(tempFieldName) && index < 1000) {
            index++;
            String suffix = String.valueOf(index);
            tempFieldName = tempFieldName.substring(0,
                    DbaseFile.MAX_FIELD_NAME_LENGTH - suffix.length()) + suffix;
        }
        if (index >= 1000) {
            throw new RuntimeException("Can't fix duplicated name for field '" + fieldName + "'.");
        }
        return tempFieldName;
    }
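
    // Note on the renaming scheme above: when a name collides, the numeric
    // suffix replaces the last characters of the (padded) name, so the
    // result never exceeds DbaseFile.MAX_FIELD_NAME_LENGTH; after 1000
    // attempts a RuntimeException is thrown.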

    public EditableFeatureType toFeatureType(EditableFeatureType featureType,
            boolean handleDatesAsStrings) throws DataException {
        featureType.setHasOID(true);

        for (DbaseFieldDescriptor dbfattr : this.myFieldDescriptions) {
            if (featureType.get(dbfattr.getName()) != null) {
                throw new DuplicatedFieldNameException(dbfattr.getName());
            }
            EditableFeatureAttributeDescriptor attr;
            switch (dbfattr.getType()) {
                case DBFTYPE_BOOLEAN:
                    attr = featureType.add(dbfattr.getName(), DataTypes.BOOLEAN);
                    attr.setSize(0);
                    attr.setDefaultValue(null);
                    attr.setAllowNull(true);
                    break;
                case DBFTYPE_FLOAT:
                    if (dbfattr.getScale() > 0) {
                        if (dbfattr.getPrecision() > DataType.DOUBLE_MAX_PRECISION
                                || dbfattr.getScale() > DataType.DOUBLE_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(),
                                    DataTypes.DECIMAL, dbfattr.getSize());
                            attr.setScale(dbfattr.getScale());
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else if (dbfattr.getPrecision() > DataType.FLOAT_MAX_PRECISION
                                || dbfattr.getScale() > DataType.FLOAT_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(),
                                    DataTypes.DOUBLE, dbfattr.getSize());
                            attr.setScale(dbfattr.getScale());
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else {
                            attr = featureType.add(dbfattr.getName(),
                                    DataTypes.FLOAT, dbfattr.getSize());
                            attr.setScale(dbfattr.getScale());
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        }
                    } else {
                        if (dbfattr.getPrecision() > DataType.LONG_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(),
                                    DataTypes.DECIMAL, dbfattr.getSize());
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(dbfattr.getScale());
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else if (dbfattr.getPrecision() > DataType.INT_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(), DataTypes.LONG);
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(0);
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else if (dbfattr.getPrecision() > DataType.BYTE_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(), DataTypes.INT);
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(0);
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else {
                            attr = featureType.add(dbfattr.getName(), DataTypes.DECIMAL);
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(0);
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        }
                    }
                    break;
                case DBFTYPE_NUMBER:
                    if (dbfattr.getScale() > 0) {
                        attr = featureType.add(dbfattr.getName(),
                                DataTypes.DECIMAL, dbfattr.getSize());
                        attr.setPrecision(dbfattr.getPrecision());
                        attr.setScale(dbfattr.getScale());
                        attr.setDefaultValue(null);
                        attr.setAllowNull(true);
                    } else {
                        if (dbfattr.getPrecision() > DataType.LONG_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(),
                                    DataTypes.DECIMAL, dbfattr.getSize());
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(dbfattr.getScale());
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else if (dbfattr.getPrecision() > DataType.INT_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(), DataTypes.LONG);
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(0);
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else if (dbfattr.getPrecision() > DataType.BYTE_MAX_PRECISION) {
                            attr = featureType.add(dbfattr.getName(), DataTypes.INT);
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(0);
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        } else {
                            attr = featureType.add(dbfattr.getName(), DataTypes.DECIMAL);
                            attr.setPrecision(dbfattr.getPrecision());
                            attr.setScale(0);
                            attr.setDefaultValue(null);
                            attr.setAllowNull(true);
                        }
                    }
                    break;
                case DBFTYPE_STRING:
                    attr = featureType.add(dbfattr.getName(),
                            DataTypes.STRING, dbfattr.getSize());
                    attr.setDefaultValue(null);
                    attr.setAllowNull(true);
                    if (dbfattr.getSize() == TIMESTAMP_SIZE) {
                        try {
                            DataType dataType = ToolsLocator.getDataTypesManager()
                                    .get(DataTypes.STRING).clone();
                            dataType.addCoercion(new CoerceDateToDbfTimestampAsString());
                            attr.setDataType(dataType);
                        } catch (Exception ex) {
                            // Do nothing
                        }
                    } else if (dbfattr.getSize() == TIME_SIZE) {
                        try {
                            DataType dataType = ToolsLocator.getDataTypesManager()
                                    .get(DataTypes.STRING).clone();
                            dataType.addCoercion(new CoerceDateToDbfTimeAsString());
                            attr.setDataType(dataType);
                        } catch (Exception ex) {
                            // Do nothing
                        }
                    }
                    break;
                case DBFTYPE_DATE:
                    if (handleDatesAsStrings) {
                        attr = featureType.add(dbfattr.getName(),
                                DataTypes.STRING, dbfattr.getSize());
                        attr.setDefaultValue(null);
                        attr.setAllowNull(true);
                    } else {
                        attr = featureType.add(dbfattr.getName(), DataTypes.DATE);
                        attr.setDefaultValue(null);
                        attr.setAllowNull(true);
                    }
                    break;
                default:
                    throw new UnknownDataTypeException(
                            dbfattr.getName(), String.valueOf(dbfattr.getType()),
                            DBFStoreProvider.NAME);
            }
            attr.setRequiredBytes(dbfattr.getSize());
        }
        return featureType;
    }
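
    // Summary of the numeric mapping applied above when scale == 0:
    //   precision > LONG_MAX_PRECISION -> DECIMAL
    //   precision > INT_MAX_PRECISION  -> LONG
    //   precision > BYTE_MAX_PRECISION -> INT
    //   otherwise                      -> DECIMAL with scale 0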

    public static DbaseFileHeader fromFeatureType(FeatureType featureType)
            throws DataException {
        return fromFeatureType(featureType, null);
    }

    public static DbaseFileHeader fromFeatureType(FeatureType featureType, String charsetName)
            throws DataException {
        DbaseFileHeader header = new DbaseFileHeader();
        Iterator iterator = featureType.iterator();
        header.myLanguageID = DbaseCodepage.getLdid(charsetName);
        header.charset = charsetName;
        while (iterator.hasNext()) {
            FeatureAttributeDescriptor descriptor = (FeatureAttributeDescriptor) iterator.next();

            if (descriptor.isComputed()) {
                continue;
            }
            int type = descriptor.getType();
            String colName = descriptor.getName();
            if (colName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
                throw new FieldNameTooLongException("DBF file", colName);
            }

            int size = descriptor.getSize();
            int scale = descriptor.getScale();
            int precision = descriptor.getPrecision();
            int requiredBytes = descriptor.getRequiredBytes();
            switch (type) {
                case DataTypes.DECIMAL:
                    header.addColumn(colName, 'N',
                            requiredBytes > 0 ? requiredBytes : precision + 3,
                            precision, scale);
                    break;
                case DataTypes.DOUBLE:
                    header.addColumn(colName, 'F',
                            requiredBytes > 0 ? requiredBytes
                                    : (precision + 2 >= DataType.DOUBLE_MAX_PRECISION
                                            ? precision : precision + 3),
                            precision, scale);
                    break;
                case DataTypes.FLOAT:
                    header.addColumn(colName, 'F',
                            requiredBytes > 0 ? requiredBytes
                                    : (precision + 2 >= DataType.FLOAT_MAX_PRECISION
                                            ? precision : precision + 3),
                            precision, scale);
                    break;
                case DataTypes.INT:
                    header.addColumn(colName, 'N',
                            requiredBytes > 0 ? requiredBytes
                                    : (precision >= DataType.INT_MAX_PRECISION
                                            ? precision : precision + 1),
                            precision, scale);
                    break;
                case DataTypes.LONG:
                    header.addColumn(colName, 'N',
                            requiredBytes > 0 ? requiredBytes
                                    : (precision >= DataType.LONG_MAX_PRECISION
                                            ? precision : precision + 1),
                            precision, scale);
                    break;
                case DataTypes.DATE:
                    header.addColumn(colName, 'D', FieldFormatter.DATE_SIZE, 0, 0);
                    break;
                case DataTypes.TIME:
                    header.addColumn(colName, 'C', FieldFormatter.TIME_SIZE, 0, 0);
                    break;
                case DataTypes.TIMESTAMP:
                    header.addColumn(colName, 'C', FieldFormatter.TIMESTAMP_SIZE, 0, 0);
                    break;
                case DataTypes.BOOLEAN:
                    header.addColumn(colName, 'L', 1, 0, 0);
                    break;
                case DataTypes.STRING:
                    header.addColumn(colName, 'C', Math.min(254, size), 0, 0);
                    break;
                case DataTypes.BYTE:
                    header.addColumn(colName, 'N',
                            requiredBytes > 0 ? requiredBytes
                                    : (precision >= DataType.BYTE_MAX_PRECISION
                                            ? precision : precision + 1),
                            precision, scale);
                    break;
                default:
                    // If we don't know what it is, try to store it as a string.
                    header.addColumn(colName, 'C', Math.min(254, size < 10 ? 10 : size), 0, 0);
                    break;
            }

        }
        return header;
    }
}
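
// Round-trip sketch (illustrative only; building the FeatureType and the
// channel is elided, and the identifiers here are hypothetical):
//
//   DbaseFileHeader header = DbaseFileHeader.fromFeatureType(featureType, "ISO-8859-1");
//   header.setNumRecords(rowCount);
//   header.write(channel); // channel: a FileChannel opened for writing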