svn-gvsig-desktop / trunk / org.gvsig.desktop / org.gvsig.desktop.compat.cdc / org.gvsig.fmap.dal / org.gvsig.fmap.dal.file / org.gvsig.fmap.dal.file.dbf / src / main / java / org / gvsig / fmap / dal / store / dbf / utils / DbaseFileHeader.java @ 40435
History | View | Annotate | Download (25.1 KB)
1 | 40435 | jjdelcerro | /*
|
---|---|---|---|
2 | * Created on 16-feb-2004
|
||
3 | *
|
||
4 | * To change the template for this generated file go to
|
||
5 | * Window>Preferences>Java>Code Generation>Code and Comments
|
||
6 | */
|
||
7 | package org.gvsig.fmap.dal.store.dbf.utils; |
||
8 | |||
9 | import java.io.IOException; |
||
10 | import java.io.UnsupportedEncodingException; |
||
11 | import java.nio.ByteBuffer; |
||
12 | import java.nio.ByteOrder; |
||
13 | import java.nio.channels.FileChannel; |
||
14 | import java.nio.charset.Charset; |
||
15 | import java.util.ArrayList; |
||
16 | import java.util.Calendar; |
||
17 | import java.util.Date; |
||
18 | import java.util.Iterator; |
||
19 | import java.util.List; |
||
20 | import java.util.Set; |
||
21 | import java.util.SortedMap; |
||
22 | |||
23 | import org.gvsig.fmap.dal.DataTypes; |
||
24 | import org.gvsig.fmap.dal.exception.UnsupportedVersionException; |
||
25 | import org.gvsig.fmap.dal.feature.FeatureAttributeDescriptor; |
||
26 | import org.gvsig.fmap.dal.feature.FeatureType; |
||
27 | import org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException; |
||
28 | import org.gvsig.tools.ToolsLocator; |
||
29 | import org.gvsig.utils.bigfile.BigByteBuffer2; |
||
30 | |||
31 | |||
32 | |||
33 | /**
|
||
34 | * Class to represent the header of a Dbase III file. Creation date: (5/15/2001
|
||
35 | * 5:15:30 PM)
|
||
36 | */
|
||
37 | public class DbaseFileHeader { |
||
38 | // Constant for the size of a record
|
||
39 | private int FILE_DESCRIPTOR_SIZE = 32; |
||
40 | |||
41 | // type of the file, must be 03h
|
||
42 | private static final byte MAGIC = 0x03; |
||
43 | |||
44 | private static final int MINIMUM_HEADER = 33; |
||
45 | |||
46 | // type of the file, must be 03h
|
||
47 | private int myFileType = 0x03; |
||
48 | |||
49 | // Date the file was last updated.
|
||
50 | private Date myUpdateDate = new Date(); |
||
51 | |||
52 | // Number of records in the datafile
|
||
53 | private int myNumRecords = 0; |
||
54 | |||
55 | // Length of the header structure
|
||
56 | private int myHeaderLength; |
||
57 | |||
58 | /**
|
||
59 | * Length of the records. Set to 1 as the default value as if there is
|
||
60 | * not any defined column, at least the deleted status initial byte
|
||
61 | * is taken into account.
|
||
62 | */
|
||
63 | private int myRecordLength = 1; |
||
64 | |||
65 | // Number of fields in the record.
|
||
66 | private int myNumFields; |
||
67 | |||
68 | // collection of header records.
|
||
69 | private DbaseFieldDescriptor[] myFieldDescriptions; |
||
70 | |||
71 | private byte myLanguageID; |
||
72 | |||
73 | private List<String> encodingSupportedByString = null; |
||
74 | |||
75 | /**
|
||
76 | * DbaseFileHreader constructor comment.
|
||
77 | */
|
||
78 | public DbaseFileHeader() {
|
||
79 | super();
|
||
80 | |||
81 | encodingSupportedByString = new ArrayList<String>(); |
||
82 | SortedMap<String, Charset> m = Charset.availableCharsets(); |
||
83 | Set<String> k = m.keySet(); |
||
84 | Iterator<String> it = k.iterator(); |
||
85 | while(it.hasNext()) {
|
||
86 | encodingSupportedByString.add(it.next()); |
||
87 | } |
||
88 | } |
||
89 | |||
90 | /**
|
||
91 | * Add a column to this DbaseFileHeader. The type is one of (C N L or D)
|
||
92 | * character, number, logical(true/false), or date. The Field length is
|
||
93 | * the total length in bytes reserved for this column. The decimal count
|
||
94 | * only applies to numbers(N), and floating point values (F), and refers
|
||
95 | * to the number of characters to reserve after the decimal point.
|
||
96 | *
|
||
97 | * @param inFieldName DOCUMENT ME!
|
||
98 | * @param inFieldType DOCUMENT ME!
|
||
99 | * @param inFieldLength DOCUMENT ME!
|
||
100 | * @param inDecimalCount DOCUMENT ME!
|
||
101 | * @throws BadFieldDriverException
|
||
102 | *
|
||
103 | * @throws Exception DOCUMENT ME!
|
||
104 | */
|
||
105 | public void addColumn(String inFieldName, char inFieldType, |
||
106 | int inFieldLength, int inDecimalCount) |
||
107 | throws AttributeFeatureTypeNotSuportedException {
|
||
108 | if (inFieldLength <= 0) { |
||
109 | inFieldLength = 1;
|
||
110 | } |
||
111 | |||
112 | if (myFieldDescriptions == null) { |
||
113 | myFieldDescriptions = new DbaseFieldDescriptor[0]; |
||
114 | } |
||
115 | |||
116 | int tempLength = 1; // the length is used for the offset, and there is a * for deleted as the first byte |
||
117 | DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length + |
||
118 | 1];
|
||
119 | |||
120 | for (int i = 0; i < myFieldDescriptions.length; i++) { |
||
121 | myFieldDescriptions[i].myFieldDataAddress = tempLength; |
||
122 | tempLength = tempLength + myFieldDescriptions[i].myFieldLength; |
||
123 | tempFieldDescriptors[i] = myFieldDescriptions[i]; |
||
124 | } |
||
125 | |||
126 | tempFieldDescriptors[myFieldDescriptions.length] = new DbaseFieldDescriptor();
|
||
127 | tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = inFieldLength; |
||
128 | tempFieldDescriptors[myFieldDescriptions.length].myDecimalCount = inDecimalCount; |
||
129 | tempFieldDescriptors[myFieldDescriptions.length].myFieldDataAddress = tempLength; |
||
130 | |||
131 | // set the field name
|
||
132 | String tempFieldName = inFieldName;
|
||
133 | |||
134 | if (tempFieldName == null) { |
||
135 | tempFieldName = "NoName";
|
||
136 | } |
||
137 | |||
138 | if (tempFieldName.length() > 11) { |
||
139 | tempFieldName = tempFieldName.substring(0, 11); |
||
140 | warn("FieldName " + inFieldName +
|
||
141 | " is longer than 11 characters, truncating to " +
|
||
142 | tempFieldName); |
||
143 | } |
||
144 | |||
145 | tempFieldDescriptors[myFieldDescriptions.length].myFieldName = tempFieldName; |
||
146 | tempFieldDescriptors[myFieldDescriptions.length].myFieldName_trim = tempFieldName |
||
147 | .trim(); |
||
148 | |||
149 | // the field type
|
||
150 | if ((inFieldType == 'C') || (inFieldType == 'c')) { |
||
151 | tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'C';
|
||
152 | |||
153 | if (inFieldLength > 254) { |
||
154 | warn("Field Length for " + inFieldName + " set to " + |
||
155 | inFieldLength + |
||
156 | " Which is longer than 254, not consistent with dbase III");
|
||
157 | } |
||
158 | } else if ((inFieldType == 'S') || (inFieldType == 's')) { |
||
159 | tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'C';
|
||
160 | warn("Field type for " + inFieldName +
|
||
161 | " set to S which is flat out wrong people!, I am setting this to C, in the hopes you meant character.");
|
||
162 | |||
163 | if (inFieldLength > 254) { |
||
164 | warn("Field Length for " + inFieldName + " set to " + |
||
165 | inFieldLength + |
||
166 | " Which is longer than 254, not consistent with dbase III");
|
||
167 | } |
||
168 | |||
169 | tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = 8;
|
||
170 | } else if ((inFieldType == 'D') || (inFieldType == 'd')) { |
||
171 | tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'D';
|
||
172 | |||
173 | if (inFieldLength != 8) { |
||
174 | warn("Field Length for " + inFieldName + " set to " + |
||
175 | inFieldLength + " Setting to 8 digets YYYYMMDD");
|
||
176 | } |
||
177 | |||
178 | tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = 8;
|
||
179 | } else if ((inFieldType == 'F') || (inFieldType == 'f')) { |
||
180 | tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'F';
|
||
181 | |||
182 | if (inFieldLength > 20) { |
||
183 | warn("Field Length for " + inFieldName + " set to " + |
||
184 | inFieldLength + |
||
185 | " Preserving length, but should be set to Max of 20 not valid for dbase IV, and UP specification, not present in dbaseIII.");
|
||
186 | } |
||
187 | } else if ((inFieldType == 'N') || (inFieldType == 'n')) { |
||
188 | tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'N';
|
||
189 | |||
190 | if (inFieldLength > 18) { |
||
191 | warn("Field Length for " + inFieldName + " set to " + |
||
192 | inFieldLength + |
||
193 | " Preserving length, but should be set to Max of 18 for dbase III specification.");
|
||
194 | } |
||
195 | |||
196 | if (inDecimalCount < 0) { |
||
197 | warn("Field Decimal Position for " + inFieldName + " set to " + |
||
198 | inDecimalCount + |
||
199 | " Setting to 0 no decimal data will be saved.");
|
||
200 | tempFieldDescriptors[myFieldDescriptions.length].myDecimalCount = 0;
|
||
201 | } |
||
202 | //
|
||
203 | // if (inDecimalCount > (inFieldLength - 1)) {
|
||
204 | // warn("Field Decimal Position for " + inFieldName + " set to " +
|
||
205 | // inDecimalCount + " Setting to " + (inFieldLength - 1) +
|
||
206 | // " no non decimal data will be saved.");
|
||
207 | // tempFieldDescriptors[myFieldDescriptions.length].myDecimalCount = inFieldLength -
|
||
208 | // 1;
|
||
209 | // }
|
||
210 | } else if ((inFieldType == 'L') || (inFieldType == 'l')) { |
||
211 | tempFieldDescriptors[myFieldDescriptions.length].myFieldType = 'L';
|
||
212 | |||
213 | if (inFieldLength != 1) { |
||
214 | warn("Field Length for " + inFieldName + " set to " + |
||
215 | inFieldLength + |
||
216 | " Setting to length of 1 for logical fields.");
|
||
217 | } |
||
218 | |||
219 | tempFieldDescriptors[myFieldDescriptions.length].myFieldLength = 1;
|
||
220 | } else {
|
||
221 | throw new AttributeFeatureTypeNotSuportedException(tempFieldName, |
||
222 | inFieldType, ToolsLocator.getDataTypesManager().getTypeName(inFieldType), "DBF");
|
||
223 | } |
||
224 | |||
225 | // the length of a record
|
||
226 | tempLength = tempLength + |
||
227 | tempFieldDescriptors[myFieldDescriptions.length].myFieldLength; |
||
228 | |||
229 | // set the new fields.
|
||
230 | myFieldDescriptions = tempFieldDescriptors; |
||
231 | myHeaderLength = 33 + (32 * myFieldDescriptions.length); |
||
232 | myNumFields = myFieldDescriptions.length; |
||
233 | myRecordLength = tempLength; |
||
234 | } |
||
235 | |||
236 | /**
|
||
237 | * Remove a column from this DbaseFileHeader.
|
||
238 | *
|
||
239 | * @param inFieldName DOCUMENT ME!
|
||
240 | *
|
||
241 | * @return index of the removed column, -1 if no found
|
||
242 | */
|
||
243 | public int removeColumn(String inFieldName) { |
||
244 | int retCol = -1; |
||
245 | int tempLength = 1; |
||
246 | DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length - |
||
247 | 1];
|
||
248 | |||
249 | for (int i = 0, j = 0; i < myFieldDescriptions.length; i++) { |
||
250 | if (!inFieldName.equalsIgnoreCase(
|
||
251 | myFieldDescriptions[i].myFieldName.trim())) { |
||
252 | // if this is the last field and we still haven't found the
|
||
253 | // named field
|
||
254 | if ((i == j) && (i == (myFieldDescriptions.length - 1))) { |
||
255 | System.err.println("Could not find a field named '" + |
||
256 | inFieldName + "' for removal");
|
||
257 | |||
258 | return retCol;
|
||
259 | } |
||
260 | |||
261 | tempFieldDescriptors[j] = myFieldDescriptions[i]; |
||
262 | tempFieldDescriptors[j].myFieldDataAddress = tempLength; |
||
263 | tempLength += tempFieldDescriptors[j].myFieldLength; |
||
264 | |||
265 | // only increment j on non-matching fields
|
||
266 | j++; |
||
267 | } else {
|
||
268 | retCol = i; |
||
269 | } |
||
270 | } |
||
271 | |||
272 | // set the new fields.
|
||
273 | myFieldDescriptions = tempFieldDescriptors; |
||
274 | myHeaderLength = 33 + (32 * myFieldDescriptions.length); |
||
275 | myNumFields = myFieldDescriptions.length; |
||
276 | myRecordLength = tempLength; |
||
277 | |||
278 | return retCol;
|
||
279 | } |
||
280 | |||
    /**
     * Emit a warning message. Currently a no-op: the warning-support class
     * is not available yet, so messages are silently discarded.
     *
     * @param inWarn text of the warning
     */
    private void warn(String inWarn) {
        //TODO Uncomment this once the warning support class exists
        // warnings.warn(inWarn);
    }
||
290 | |||
    /**
     * Return the field descriptor for the given column.
     *
     * @param inIndex zero-based column index
     *
     * @return descriptor of the column
     */
    public DbaseFieldDescriptor getFieldDescription(int inIndex) {
        return myFieldDescriptions[inIndex];
    }

    /** Retrieve the length in bytes of the field at the given index. */
    public int getFieldLength(int inIndex) {
        return myFieldDescriptions[inIndex].myFieldLength;
    }

    /** Retrieve the number of digits after the decimal point of the field. */
    public int getFieldDecimalCount(int inIndex) {
        return myFieldDescriptions[inIndex].myDecimalCount;
    }

    /** Retrieve the (untrimmed) name of the field at the given index. */
    public String getFieldName(int inIndex) {
        return myFieldDescriptions[inIndex].myFieldName;
    }

    /**
     * Find a field by its trimmed name, ignoring case.
     *
     * @param name field name to look for
     *
     * @return zero-based index of the field, or -1 if no field matches
     */
    public int getFieldIndex(String name) {
        for (int i = 0; i < myFieldDescriptions.length; i++) {
            if (myFieldDescriptions[i].myFieldName_trim
                .equalsIgnoreCase(name)) {
                return i;
            }
        }
        return -1;
    }

    /** Retrieve the type character (C, N, L, D, F, ...) of the field. */
    public char getFieldType(int inIndex) {
        return myFieldDescriptions[inIndex].myFieldType;
    }
||
331 | |||
    /**
     * Return the date this file was last updated.
     *
     * @return last-update date parsed from (or to be written to) the header
     */
    public Date getLastUpdateDate() {
        return myUpdateDate;
    }

    /**
     * Return the number of fields in the records.
     *
     * @return number of columns defined in this header
     */
    public int getNumFields() {
        return myNumFields;
    }

    /**
     * Return the number of records in the file.
     *
     * @return record count stored in the header
     */
    public int getNumRecords() {
        return myNumRecords;
    }

    /**
     * Return the length of the records in bytes.
     *
     * @return record length, including the leading deleted-status byte
     */
    public int getRecordLength() {
        return myRecordLength;
    }

    /**
     * Return the length of the header in bytes
     * (32-byte fixed part + 32 per field + the terminator byte).
     *
     * @return header length in bytes
     */
    public int getHeaderLength() {
        return myHeaderLength;
    }
||
376 | |||
    /**
     * Read the header data from the DBF file.
     *
     * @param in        buffer positioned at the start of the file
     * @param charsName charset used to decode field names; if null, it is
     *                  derived from the header's language ID byte via
     *                  getCharsetName()/mappingEncoding()
     *
     * @throws UnsupportedVersionException  if the file type byte is not 03h
     *                                      (dBase III)
     * @throws UnsupportedEncodingException if the charset name is not
     *                                      supported when decoding names
     */
    public void readHeader(BigByteBuffer2 in, String charsName)
        throws UnsupportedVersionException, UnsupportedEncodingException {
        // type of file: must be 03h (dBase III).
        myFileType = in.get();

        if (myFileType != 0x03) {
            throw new UnsupportedVersionException("DBF", Integer
                .toHexString(myFileType));
        }

        // Parse the last-update date, stored as YY MM DD where YY is the
        // number of years since 1900.
        int tempUpdateYear = in.get();
        int tempUpdateMonth = in.get();
        int tempUpdateDay = in.get();
        tempUpdateYear = tempUpdateYear + 1900;

        Calendar c = Calendar.getInstance();
        c.set(Calendar.YEAR, tempUpdateYear);
        c.set(Calendar.MONTH, tempUpdateMonth - 1); // Calendar months are 0-based
        c.set(Calendar.DATE, tempUpdateDay);
        myUpdateDate = c.getTime();

        // The fixed numeric header fields are little-endian.
        in.order(ByteOrder.LITTLE_ENDIAN);
        myNumRecords = in.getInt();

        // read the length of the header structure.
        myHeaderLength = in.getShort();

        // read the length of a record
        myRecordLength = in.getShort();

        in.order(ByteOrder.BIG_ENDIAN);

        // skip the reserved bytes in the header.
        // in.position(in.position() + 20);

        // Read the language driver (code page) byte at offset 29.
        in.position(29);
        myLanguageID = in.get();
        if (charsName == null) {
            // No charset given by the caller: derive one from the language
            // ID, falling back to UTF-8 if the JVM does not support it.
            charsName = getCharsetName();
            charsName = mappingEncoding(charsName);
        }


        // Position at offset 32, where the field descriptors start.
        in.position(32);

        // Number of fields: everything after the 32-byte fixed part, minus
        // the 1-byte terminator, in FILE_DESCRIPTOR_SIZE-byte entries.
        myNumFields = (myHeaderLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;

        // read all of the header records
        myFieldDescriptions = new DbaseFieldDescriptor[myNumFields];
        int fieldOffset = 0;

        for (int i = 0; i < myNumFields; i++) {
            myFieldDescriptions[i] = new DbaseFieldDescriptor();

            // read the 11-byte, zero-padded field name
            byte[] buffer = new byte[11];
            in.get(buffer);
            if (charsName != null) {
                myFieldDescriptions[i].myFieldName = new String(buffer,
                        charsName);
            } else {
                // NOTE(review): platform-default decoding; only reachable if
                // the caller passed null AND the derivation above produced
                // null, which getCharsetName() never does — confirm.
                myFieldDescriptions[i].myFieldName = new String(buffer);
            }
            myFieldDescriptions[i].myFieldName_trim = myFieldDescriptions[i].myFieldName
                    .trim();

            // read the field type
            myFieldDescriptions[i].myFieldType = (char) in.get();

            // read the field data address, offset from the start of the record.
            myFieldDescriptions[i].myFieldDataAddress = in.getInt();

            // read the field length in bytes, as an unsigned value (0..255)
            int tempLength = in.get();

            if (tempLength < 0) {
                tempLength = tempLength + 256;
            }

            myFieldDescriptions[i].myFieldLength = tempLength;

            // read the field decimal count in bytes
            myFieldDescriptions[i].myDecimalCount = in.get();

            // Recompute the data address as the running offset so later
            // lookups need no recalculation; this deliberately overrides the
            // address read from the file above.
            myFieldDescriptions[i].myFieldDataAddress = fieldOffset;
            fieldOffset += tempLength;

            // skip the reserved bytes of the descriptor.
            in.position(in.position() + 14);
        }

        // Last byte is a marker for the end of the field definitions.
        in.get();
    }
||
490 | |||
    /**
     * Set the number of records in the file.
     *
     * @param inNumRecords record count to store in the header
     */
    public void setNumRecords(int inNumRecords) {
        myNumRecords = inNumRecords;
    }
||
499 | |||
500 | /*
|
||
501 | * Write the header data to the DBF file.
|
||
502 | *
|
||
503 | * @param out DOCUMENT ME!
|
||
504 | *
|
||
505 | * @throws Exception DOCUMENT ME!
|
||
506 | *
|
||
507 | public void writeHeader(LEDataOutputStream out) throws Exception {
|
||
508 | // write the output file type.
|
||
509 | out.writeByte(myFileType);
|
||
510 | // write the date stuff
|
||
511 | Calendar c = Calendar.getInstance();
|
||
512 | c.setTime(new Date());
|
||
513 | out.writeByte(c.get(Calendar.YEAR) - 1900);
|
||
514 | out.writeByte(c.get(Calendar.MONTH) + 1);
|
||
515 | out.writeByte(c.get(Calendar.DAY_OF_MONTH));
|
||
516 | // write the number of records in the datafile.
|
||
517 | out.writeInt(myNumRecords);
|
||
518 | // write the length of the header structure.
|
||
519 | out.writeShort(myHeaderLength);
|
||
520 | // write the length of a record
|
||
521 | out.writeShort(myRecordLength);
|
||
522 | // write the reserved bytes in the header
|
||
523 | for (int i = 0; i < 20; i++)
|
||
524 | out.writeByte(0);
|
||
525 | // write all of the header records
|
||
526 | int tempOffset = 0;
|
||
527 | for (int i = 0; i < myFieldDescriptions.length; i++) {
|
||
528 | // write the field name
|
||
529 | for (int j = 0; j < 11; j++) {
|
||
530 | if (myFieldDescriptions[i].myFieldName.length() > j) {
|
||
531 | out.writeByte((int) myFieldDescriptions[i].myFieldName.charAt(
|
||
532 | j));
|
||
533 | } else {
|
||
534 | out.writeByte(0);
|
||
535 | }
|
||
536 | }
|
||
537 | // write the field type
|
||
538 | out.writeByte(myFieldDescriptions[i].myFieldType);
|
||
539 | // write the field data address, offset from the start of the record.
|
||
540 | out.writeInt(tempOffset);
|
||
541 | tempOffset += myFieldDescriptions[i].myFieldLength;
|
||
542 | // write the length of the field.
|
||
543 | out.writeByte(myFieldDescriptions[i].myFieldLength);
|
||
544 | // write the decimal count.
|
||
545 | out.writeByte(myFieldDescriptions[i].myDecimalCount);
|
||
546 | // write the reserved bytes.
|
||
547 | for (int j = 0; j < 14; j++)
|
||
548 | out.writeByte(0);
|
||
549 | }
|
||
550 | // write the end of the field definitions marker
|
||
551 | out.writeByte(0x0D);
|
||
552 | }
|
||
553 | */
|
||
554 | |||
555 | /**
|
||
556 | * Class for holding the information assicated with a record.
|
||
557 | */
|
||
558 | class DbaseFieldDescriptor { |
||
559 | // Field Name
|
||
560 | String myFieldName;
|
||
561 | |||
562 | String myFieldName_trim;
|
||
563 | |||
564 | // Field Type (C N L D F or M)
|
||
565 | char myFieldType;
|
||
566 | |||
567 | // Field Data Address offset from the start of the record.
|
||
568 | int myFieldDataAddress;
|
||
569 | |||
570 | // Length of the data in bytes
|
||
571 | int myFieldLength;
|
||
572 | |||
573 | // Field decimal count in Binary, indicating where the decimal is
|
||
574 | int myDecimalCount;
|
||
575 | } |
||
576 | |||
    /**
     * Return the raw dBase language driver (code page) byte read from the
     * header; see getCharsetName() for its mapping to a charset name.
     *
     * @return the language ID byte
     */
    public byte getLanguageID() {
        return myLanguageID;
    }
||
580 | |||
581 | |||
582 | |||
    /**
     * Build a DbaseFileHeader whose columns mirror the attribute
     * descriptors of the given feature type: numeric attributes become 'N'
     * columns (length capped at 18), dates 'D', booleans 'L' and strings
     * 'C' (length capped at 254).
     *
     * NOTE(review): attribute types other than the ones listed above are
     * silently skipped — no column is created and no error raised; confirm
     * this is intended for geometry/binary attributes.
     *
     * @param featureType feature type whose attributes define the columns
     *
     * @return a header with one column per supported attribute
     *
     * @throws AttributeFeatureTypeNotSuportedException if addColumn rejects
     *         a field type
     */
    public static DbaseFileHeader createDbaseHeader(FeatureType featureType)
        throws AttributeFeatureTypeNotSuportedException {
        DbaseFileHeader header = new DbaseFileHeader();
        Iterator iterator=featureType.iterator();
        // TODO header.myLanguageID = langId;
        while (iterator.hasNext()) {
            FeatureAttributeDescriptor descriptor = (FeatureAttributeDescriptor) iterator.next();


            int type = descriptor.getType();
            String colName = descriptor.getName();

            int fieldLen = descriptor.getSize(); // TODO: this size is not
            // correct here; it should be computed, right now it is
            // hard-coded.
            int decimales = descriptor.getPrecision();
            // Floating-point columns must reserve at least one decimal digit.
            if ((type==DataTypes.DOUBLE || type==DataTypes.FLOAT) && decimales==0){
                decimales=1;
            }

            if (DataTypes.DOUBLE == type || DataTypes.FLOAT == type
                || DataTypes.INT == type || DataTypes.LONG == type) {
                header.addColumn(colName, 'N', Math.min(fieldLen, 18),
                    decimales);
            } else if (DataTypes.DATE == type) {
                header.addColumn(colName, 'D', fieldLen, 0);
            } else if (DataTypes.BOOLEAN == type) {
                header.addColumn(colName, 'L', 1, 0);
            } else if (DataTypes.STRING == type) {
                header.addColumn(colName, 'C', Math.min(254, fieldLen), 0);
            }


        }
        return header;
    }
||
622 | /**
|
||
623 | * Write the header data to the DBF file.
|
||
624 | *
|
||
625 | * @param out
|
||
626 | * A channel to write to. If you have an OutputStream you can
|
||
627 | * obtain the correct channel by using
|
||
628 | * java.nio.Channels.newChannel(OutputStream out).
|
||
629 | *
|
||
630 | * @throws IOException
|
||
631 | * If errors occur.
|
||
632 | */
|
||
633 | public void writeHeader(FileChannel out) throws IOException { |
||
634 | // take care of the annoying case where no records have been added...
|
||
635 | if (myHeaderLength <= 0) { |
||
636 | myHeaderLength = MINIMUM_HEADER; |
||
637 | } |
||
638 | |||
639 | // Desde el principio
|
||
640 | out.position(0);
|
||
641 | |||
642 | ByteBuffer buffer = ByteBuffer.allocateDirect(myHeaderLength); |
||
643 | buffer.order(ByteOrder.LITTLE_ENDIAN);
|
||
644 | |||
645 | // write the output file type.
|
||
646 | buffer.put(MAGIC); |
||
647 | |||
648 | // write the date stuff
|
||
649 | Calendar c = Calendar.getInstance(); |
||
650 | c.setTime(new Date()); |
||
651 | buffer.put((byte) (c.get(Calendar.YEAR) % 100)); |
||
652 | buffer.put((byte) (c.get(Calendar.MONTH) + 1)); |
||
653 | buffer.put((byte) (c.get(Calendar.DAY_OF_MONTH))); |
||
654 | |||
655 | // write the number of records in the datafile.
|
||
656 | buffer.putInt(myNumRecords); |
||
657 | |||
658 | // write the length of the header structure.
|
||
659 | buffer.putShort((short) myHeaderLength);
|
||
660 | |||
661 | // write the length of a record
|
||
662 | buffer.putShort((short) myRecordLength);
|
||
663 | |||
664 | // // write the reserved bytes in the header
|
||
665 | // for (int i=0; i<20; i++) out.writeByteLE(0);
|
||
666 | buffer.position(buffer.position() + 20);
|
||
667 | |||
668 | // write all of the header records
|
||
669 | int tempOffset = 0; |
||
670 | |||
671 | if (myFieldDescriptions != null) { |
||
672 | for (int i = 0; i < myFieldDescriptions.length; i++) { |
||
673 | // write the field name
|
||
674 | for (int j = 0; j < 11; j++) { |
||
675 | if (myFieldDescriptions[i].myFieldName.length() > j) {
|
||
676 | buffer.put((byte) myFieldDescriptions[i].myFieldName.charAt(j));
|
||
677 | } else {
|
||
678 | buffer.put((byte) 0); |
||
679 | } |
||
680 | } |
||
681 | |||
682 | // write the field type
|
||
683 | buffer.put((byte) myFieldDescriptions[i].myFieldType);
|
||
684 | |||
685 | // // write the field data address, offset from the start of the
|
||
686 | // record.
|
||
687 | buffer.putInt(tempOffset); |
||
688 | tempOffset += myFieldDescriptions[i].myFieldLength; |
||
689 | |||
690 | // write the length of the field.
|
||
691 | buffer.put((byte) myFieldDescriptions[i].myFieldLength);
|
||
692 | |||
693 | // write the decimal count.
|
||
694 | buffer.put((byte) myFieldDescriptions[i].myDecimalCount);
|
||
695 | |||
696 | // write the reserved bytes.
|
||
697 | // for (in j=0; jj<14; j++) out.writeByteLE(0);
|
||
698 | buffer.position(buffer.position() + 14);
|
||
699 | } |
||
700 | } |
||
701 | // write the end of the field definitions marker
|
||
702 | buffer.put((byte) 0x0D); |
||
703 | |||
704 | buffer.position(0);
|
||
705 | |||
706 | int r = buffer.remaining();
|
||
707 | |||
708 | while ((r -= out.write(buffer)) > 0) { |
||
709 | ; // do nothing
|
||
710 | } |
||
711 | } |
||
712 | |||
713 | /**
|
||
714 | * 01h DOS USA code page 437
|
||
715 | 02h DOS Multilingual code page 850
|
||
716 | 03h Windows ANSI code page 1252
|
||
717 | 04h Standard Macintosh
|
||
718 | 64h EE MS-DOS code page 852
|
||
719 | 65h Nordic MS-DOS code page 865
|
||
720 | 66h Russian MS-DOS code page 866
|
||
721 | 67h Icelandic MS-DOS
|
||
722 | 68h Kamenicky (Czech) MS-DOS
|
||
723 | 69h Mazovia (Polish) MS-DOS
|
||
724 | 6Ah Greek MS-DOS (437G)
|
||
725 | 6Bh Turkish MS-DOS
|
||
726 | 96h Russian Macintosh
|
||
727 | 97h Eastern European Macintosh
|
||
728 | 98h Greek Macintosh
|
||
729 | C8h Windows EE code page 1250
|
||
730 | C9h Russian Windows
|
||
731 | CAh Turkish Windows
|
||
732 | CBh Greek Windows
|
||
733 | * @return
|
||
734 | */
|
||
735 | public String getCharsetName() { |
||
736 | switch (getLanguageID()) {
|
||
737 | case 0x01: |
||
738 | return "US-ASCII"; |
||
739 | case 0x02: |
||
740 | return "ISO-8859-1"; |
||
741 | case 0x03: |
||
742 | return "windows-1252"; |
||
743 | case 0x04: |
||
744 | return "mac"; |
||
745 | case 0x64: |
||
746 | return "ISO-8859-1"; |
||
747 | case 0x65: |
||
748 | return "ISO-8859-1"; |
||
749 | case 0x66: |
||
750 | return "ISO-8859-1"; |
||
751 | case 0x67: |
||
752 | return "ISO-8859-1"; |
||
753 | case 0x68: |
||
754 | return "greek"; |
||
755 | case 0x69: |
||
756 | return "ISO-8859-1"; |
||
757 | case 0x6A: |
||
758 | return "greek"; |
||
759 | case 0x6B: |
||
760 | return "ISO-8859-1"; |
||
761 | |||
762 | default:
|
||
763 | return "ISO-8859-1"; |
||
764 | } |
||
765 | } |
||
766 | |||
767 | public String mappingEncoding(String dbfEnconding) { |
||
768 | if(encodingSupportedByString.contains(dbfEnconding))
|
||
769 | return dbfEnconding;
|
||
770 | else
|
||
771 | return "UTF-8"; |
||
772 | } |
||
773 | |||
774 | } |