Statistics
| Revision:

svn-gvsig-desktop / trunk / org.gvsig.desktop / org.gvsig.desktop.compat.cdc / org.gvsig.fmap.dal / org.gvsig.fmap.dal.file / org.gvsig.fmap.dal.file.dbf / src / main / java / org / gvsig / fmap / dal / store / dbf / utils / DbaseFileHeader.java @ 44870

History | View | Annotate | Download (29.2 KB)

1
/**
2
 * gvSIG. Desktop Geographic Information System.
3
 *
4
 * Copyright (C) 2007-2013 gvSIG Association.
5
 *
6
 * This program is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU General Public License
8
 * as published by the Free Software Foundation; either version 3
9
 * of the License, or (at your option) any later version.
10
 *
11
 * This program is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
 * GNU General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU General Public License
17
 * along with this program; if not, write to the Free Software
18
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19
 * MA  02110-1301, USA.
20
 *
21
 * For any additional information, do not hesitate to contact us
22
 * at info AT gvsig.com, or visit our website www.gvsig.com.
23
 */
24
package org.gvsig.fmap.dal.store.dbf.utils;
25

    
26
import java.io.IOException;
27
import java.io.UnsupportedEncodingException;
28
import java.nio.Buffer;
29
import java.nio.ByteBuffer;
30
import java.nio.ByteOrder;
31
import java.nio.channels.FileChannel;
32
import java.nio.charset.Charset;
33
import java.util.ArrayList;
34
import java.util.Calendar;
35
import java.util.Date;
36
import java.util.Iterator;
37
import java.util.List;
38
import java.util.Set;
39
import java.util.SortedMap;
40

    
41
import org.gvsig.fmap.dal.DataTypes;
42
import org.gvsig.fmap.dal.exception.DataException;
43
import org.gvsig.fmap.dal.exception.UnsupportedVersionException;
44
import org.gvsig.fmap.dal.feature.EditableFeatureAttributeDescriptor;
45
import org.gvsig.fmap.dal.feature.EditableFeatureType;
46
import org.gvsig.fmap.dal.feature.FeatureAttributeDescriptor;
47
import org.gvsig.fmap.dal.feature.FeatureType;
48
import org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException;
49
import org.gvsig.fmap.dal.feature.exception.UnknownDataTypeException;
50
import org.gvsig.fmap.dal.store.dbf.DBFStoreProvider;
51
import org.gvsig.fmap.dal.store.dbf.DuplicatedFieldNameException;
52
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_BOOLEAN;
53
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_DATE;
54
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_FLOAT;
55
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_NUMBER;
56
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_STRING;
57
import org.gvsig.tools.ToolsLocator;
58
import org.gvsig.tools.dataTypes.DataType;
59
import org.gvsig.utils.bigfile.BigByteBuffer2;
60
import org.slf4j.Logger;
61
import org.slf4j.LoggerFactory;
62
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIMESTAMP_SIZE;
63
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIME_SIZE;
64

    
65
/**
 * Class to represent the header of a Dbase III file: file-type byte, last
 * update date, record count, header/record lengths and the per-field
 * descriptors.
 */
@SuppressWarnings("UseSpecificCatch")
public class DbaseFileHeader {

  private static final Logger LOGGER = LoggerFactory.getLogger(DbaseFileHeader.class);

  // Constant for the size of a record (one field-descriptor entry, in bytes)
  private final int FILE_DESCRIPTOR_SIZE = 32;

  // type of the file, must be 03h
  private static final byte MAGIC = 0x03;

  // Minimum header size: 32-byte fixed part plus the 0x0D terminator byte.
  private static final int MINIMUM_HEADER = 33;

  // type of the file, must be 03h
  private int myFileType = 0x03;

  // Date the file was last updated.
  private Date myUpdateDate = new Date();

  // Number of records in the datafile
  private int myNumRecords = 0;

  // Length of the header structure, in bytes
  private int myHeaderLength;

  /**
   * Length of the records. Set to 1 as the default value as if there is not any
   * defined column, at least the deleted status initial byte is taken into
   * account.
   */
  private int myRecordLength = 1;

  // Number of fields in the record.
  private int myNumFields;

  // collection of header records (one descriptor per field).
  private DbaseFieldDescriptor[] myFieldDescriptions;

  // Language driver id (LDID / code page); 0x00 means "not set".
  private int myLanguageID = 0x00;
  /**
   * Only considered when myLanguageID = 0x00;
   */
  private String charset = null;

  // Names of every charset supported by the running JVM (filled in ctor).
  private List<String> encodingSupportedByString = null;

  // Language driver id exactly as read from the file header.
  private int origLanguageID = 0x00;

  /**
   * Headers must always be encoded using ASCII/ISO-8859-1, regardless the
   * encoding of the records
   */
  private static final Charset HEADER_CHARSET = Charset.forName("ISO-8859-1");
121

    
122
  /**
   * Creates an empty header and caches the names of all charsets supported
   * by the running JVM, used later by {@link #mappingEncoding(String)}.
   */
  public DbaseFileHeader() {
    super();

    // Charset.availableCharsets() returns a SortedMap, so addAll preserves
    // the same (sorted) order the old explicit iterator loop produced.
    encodingSupportedByString = new ArrayList<>();
    encodingSupportedByString.addAll(Charset.availableCharsets().keySet());
  }
136

    
137
  /**
   * Add a column to this DbaseFileHeader. The type is one of (C N L or D):
   * character, number, logical(true/false), or date. The field length is the
   * total length in bytes reserved for this column. The decimal count only
   * applies to numbers(N), and floating point values (F), and refers to the
   * number of characters to reserve after the decimal point.
   *
   * @param fieldName name of the new column; replaced by "NoName" when null
   *        and truncated when longer than the maximum DBF field-name length
   * @param fieldType DBF type character (C, N, L, D, F, ...)
   * @param fieldSize total length in bytes reserved for this column
   * @param fieldPrecision precision of numeric columns
   *        (NOTE(review): not used by this method — confirm intent)
   * @param fieldScale number of digits reserved after the decimal point
   * @throws
   * org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException
   *
   */
  public void addColumn(String fieldName, char fieldType,
          int fieldSize, int fieldPrecision, int fieldScale)
          throws AttributeFeatureTypeNotSuportedException {
//    if (fieldSize <= 0) {
//      fieldSize = 1;
//    }

    if (myFieldDescriptions == null) {
      myFieldDescriptions = new DbaseFieldDescriptor[0];
    }

    int tempLength = 1; // the length is used for the offset, and there is a * for deleted as the first byte
    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
            + 1];

    // Copy the existing descriptors into the larger array, recomputing each
    // field's offset inside the record as we go.
    for (int i = 0; i < myFieldDescriptions.length; i++) {
      myFieldDescriptions[i].setOffsetInRecord(tempLength);
      tempLength = tempLength + myFieldDescriptions[i].getSize();
      tempFieldDescriptors[i] = myFieldDescriptions[i];
    }

    // Build the descriptor for the new column at the end of the array.
    tempFieldDescriptors[myFieldDescriptions.length] = new DbaseFieldDescriptor();
    tempFieldDescriptors[myFieldDescriptions.length].setType(fieldType);
    tempFieldDescriptors[myFieldDescriptions.length].setSize(fieldSize);
    tempFieldDescriptors[myFieldDescriptions.length].setScale(fieldScale);
    tempFieldDescriptors[myFieldDescriptions.length].setOffsetInRecord(tempLength);

    // set the field name
    String tempFieldName = fieldName;

    if (tempFieldName == null) {
      tempFieldName = "NoName";
    }

    if (tempFieldName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH);
      warn("FieldName " + fieldName
              + " is longer than " + DbaseFile.MAX_FIELD_NAME_LENGTH + " characters, truncating to "
              + tempFieldName);
    }

    tempFieldDescriptors[myFieldDescriptions.length].setName(tempFieldName);
    tempFieldDescriptors[myFieldDescriptions.length].setName_trim(tempFieldName.trim());

    // the length of a record
    tempLength += tempFieldDescriptors[myFieldDescriptions.length].getSize();

    // set the new fields.
    myFieldDescriptions = tempFieldDescriptors;
    // Header: 32-byte fixed part + 32 bytes per field + 1 terminator byte.
    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
    myNumFields = myFieldDescriptions.length;
    myRecordLength = tempLength;
  }
206

    
207
  /**
   * Remove a column from this DbaseFileHeader.
   *
   * @param inFieldName name of the column to remove, compared
   *        case-insensitively against each field's trimmed name
   *
   * @return index of the removed column, -1 if no found
   */
  public int removeColumn(String inFieldName) {
    // NOTE(review): assumes at least one column exists; myFieldDescriptions
    // must be non-null here or the array allocation below throws NPE.
    int retCol = -1;
    int tempLength = 1; // record length starts at 1 for the deleted-flag byte
    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
            - 1];

    // i walks the old array, j the new (one-shorter) array; j only advances
    // for fields that are kept.
    for (int i = 0, j = 0; i < myFieldDescriptions.length; i++) {
      if (!inFieldName.equalsIgnoreCase(
              myFieldDescriptions[i].getName().trim())) {
        // if this is the last field and we still haven't found the
        // named field
        if ((i == j) && (i == (myFieldDescriptions.length - 1))) {
//                    System.err.println("Could not find a field named '" +
//                        inFieldName + "' for removal");

          return retCol;
        }

        tempFieldDescriptors[j] = myFieldDescriptions[i];
        tempFieldDescriptors[j].setOffsetInRecord(tempLength);
        tempLength += tempFieldDescriptors[j].getSize();

        // only increment j on non-matching fields
        j++;
      } else {
        retCol = i;
      }
    }

    // set the new fields.
    myFieldDescriptions = tempFieldDescriptors;
    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
    myNumFields = myFieldDescriptions.length;
    myRecordLength = tempLength;

    return retCol;
  }
251

    
252
  /**
   * Emits a warning through the class logger.
   *
   * @param inWarn message to log
   */
  private void warn(String inWarn) {
    LOGGER.warn(inWarn);
  }
255

    
256
  /**
   * Return the Field Descriptor for the given field.
   *
   * @param index the index of the requested field description
   * @return the dbase field descriptor.
   */
  public DbaseFieldDescriptor getFieldDescription(int index) {
    return myFieldDescriptions[index];
  }
265

    
266
  /**
   * Return the Field Descriptor for the field with the given name.
   *
   * NOTE(review): when the name does not match any field, getFieldIndex
   * returns -1 and this method throws ArrayIndexOutOfBoundsException —
   * confirm callers always pass existing names.
   *
   * @param name field name, matched case-insensitively
   * @return the dbase field descriptor
   */
  public DbaseFieldDescriptor getFieldDescription(String name) {
    int index = this.getFieldIndex(name);
    return myFieldDescriptions[index];
  }
270

    
271
  public int getFieldIndex(String name) {
272
    for (int i = 0; i < myFieldDescriptions.length; i++) {
273
      if (myFieldDescriptions[i].getName_trim()
274
              .equalsIgnoreCase(name)) {
275
        return i;
276
      }
277
    }
278
    return -1;
279
  }
280

    
281
  /**
282
   * Return the date this file was last updated.
283
   *
284
   * @return
285
   */
286
  public Date getLastUpdateDate() {
287
    return myUpdateDate;
288
  }
289

    
290
  /**
   * Return the number of fields in the records.
   *
   * @return the field count
   */
  public int getNumFields() {
    return myNumFields;
  }
298

    
299
  /**
   * Return the number of records in the file
   *
   * @return the record count
   */
  public int getNumRecords() {
    return myNumRecords;
  }
307

    
308
  /**
   * Return the length of the records in bytes.
   *
   * @return the record length, including the leading deleted-flag byte
   */
  public int getRecordLength() {
    return myRecordLength;
  }
316

    
317
  /**
   * Return the length of the header
   *
   * @return the header length in bytes
   */
  public int getHeaderLength() {
    return myHeaderLength;
  }
325

    
326
  /**
327
   * Read the header data from the DBF file.
328
   *
329
   * @param in
330
   * @param charsName
331
   * @param allowDuplicatedFieldNames
332
   * @throws UnsupportedVersionException
333
   * @throws AttributeFeatureTypeNotSuportedException
334
   *
335
   */
336
  public void read(BigByteBuffer2 in, String charsName, boolean allowDuplicatedFieldNames)
337
          throws UnsupportedVersionException, 
338
            AttributeFeatureTypeNotSuportedException {
339
    // type of file.
340
    myFileType = in.get();
341

    
342
    if (myFileType != 0x03) {
343
      throw new UnsupportedVersionException("DBF", Integer.toHexString(myFileType));
344
    }
345

    
346
    // parse the update date information.
347
    int tempUpdateYear = in.get();
348
    int tempUpdateMonth = in.get();
349
    int tempUpdateDay = in.get();
350
    tempUpdateYear = tempUpdateYear + 1900;
351

    
352
    Calendar c = Calendar.getInstance();
353
    c.set(Calendar.YEAR, tempUpdateYear);
354
    c.set(Calendar.MONTH, tempUpdateMonth - 1);
355
    c.set(Calendar.DATE, tempUpdateDay);
356
    myUpdateDate = c.getTime();
357

    
358
    // read the number of records.
359
    in.order(ByteOrder.LITTLE_ENDIAN);
360
    myNumRecords = in.getInt();
361

    
362
    // read the length of the header structure.
363
    myHeaderLength = in.getShort();
364

    
365
    // read the length of a record
366
    myRecordLength = in.getShort(); //posicon 0h
367

    
368
    in.order(ByteOrder.BIG_ENDIAN);
369

    
370
    // read the language bit (LDID) 
371
    in.position(29);
372
    origLanguageID = byteAsUnsigned(in.get());
373
    if (charsName != null) {
374
      // ignore the language bit, use the provided charset name
375
      myLanguageID = DbaseCodepage.getLdid(charsName);
376
      this.charset = charsName;
377
    } else {
378
      // use the read the language bit
379
      myLanguageID = origLanguageID;
380
      charsName = getCharsetName();
381
    }
382

    
383
    // Posicionamos para empezar a leer los campos.
384
    in.position(32);
385

    
386
    // calculate the number of Fields in the header
387
    myNumFields = (myHeaderLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;
388

    
389
    // read all of the header records
390
    myFieldDescriptions = new DbaseFieldDescriptor[myNumFields];
391
    int fieldOffset = 0;
392

    
393
    List fieldNames = new ArrayList<>();
394

    
395
    // FIXME: should field names be always read using ISO8859-1??
396
    for (int i = 0; i < myNumFields; i++) {
397
      myFieldDescriptions[i] = new DbaseFieldDescriptor();
398

    
399
      // read the field name
400
      byte[] buffer = new byte[11];
401
      in.get(buffer);
402
      String fieldName;
403
      fieldName = new String(buffer, HEADER_CHARSET);
404

    
405
      if (allowDuplicatedFieldNames) {
406
        fieldName = getUniqueFieldName(fieldName, fieldNames);
407
      }
408
      fieldNames.add(fieldName);
409

    
410
      myFieldDescriptions[i].setName(fieldName);
411

    
412
      myFieldDescriptions[i].setName_trim(myFieldDescriptions[i].getName().trim());
413

    
414
      // read the field type
415
      myFieldDescriptions[i].setType((char) in.get());
416

    
417
      // read the field data address, offset from the start of the record.
418
      myFieldDescriptions[i].setOffsetInRecord(in.getInt());
419

    
420
      // read the field length in bytes
421
      int tempLength = in.get();
422

    
423
      if (tempLength < 0) {
424
        tempLength = tempLength + 256;
425
      }
426

    
427
      myFieldDescriptions[i].setSize(tempLength);
428

    
429
      // read the field decimal count in bytes
430
      myFieldDescriptions[i].setScale(in.get());
431

    
432
      myFieldDescriptions[i].setOffsetInRecord(fieldOffset);
433
      
434
      fieldOffset += tempLength;
435

    
436
      // read the reserved bytes.
437
      in.position(in.position() + 14);
438

    
439
    }
440

    
441
    // Last byte is a marker for the end of the field definitions.
442
    in.get();
443
  }
444

    
445
  /**
   * Set the number of records in the file
   *
   * @param inNumRecords the new record count
   */
  public void setNumRecords(int inNumRecords) {
    myNumRecords = inNumRecords;
  }
453

    
454
  /**
455
   * Returns the value of the unsigned byte as a short Bytes are always signed
456
   * in Java, so if we are reading a C unsigned byte with value > 128, it will
457
   * appear as a negative value.
458
   *
459
   * In this case, we need to get the original unsigned value and return it as
460
   * short or int, as byte will never correctly store the value in Java.
461
   *
462
   * @return
463
   */
464
  private int byteAsUnsigned(byte b) {
465
    int i;
466
    if (b < 0) {
467
      i = b & 0xFF;
468
    } else {
469
      i = b;
470
    }
471
    return i;
472
  }
473

    
474
  /**
   * Gets the Language driver ID (code page) defined on the file header (or
   * guessed from the provided charset).
   *
   * Some examples:
   *   01h  DOS USA               code page 437
   *   02h  DOS Multilingual      code page 850
   *   03h  Windows ANSI          code page 1252
   *   04h  Standard Macintosh
   *   64h  EE MS-DOS             code page 852
   *   65h  Nordic MS-DOS         code page 865
   *   66h  Russian MS-DOS        code page 866
   *   67h  Icelandic MS-DOS
   *   68h  Kamenicky (Czech) MS-DOS
   *   69h  Mazovia (Polish) MS-DOS
   *   6Ah  Greek MS-DOS (437G)
   *   6Bh  Turkish MS-DOS
   *   96h  Russian Macintosh
   *   97h  Eastern European Macintosh
   *   98h  Greek Macintosh
   *   C8h  Windows EE            code page 1250
   *   C9h  Russian Windows
   *   CAh  Turkish Windows
   *   CBh  Greek Windows
   *
   * See the java equivalences in
   * {@link DbaseCodepage#dbfLdid} & {@link DbaseCodepage#ldidJava} objects.
   *
   * See some others here:
   * https://github.com/infused/dbf/blob/master/docs/supported_encodings.csv
   *
   * @return the language driver id (LDID) of this header
   */
  public int getLanguageID() {

    return myLanguageID;
  }
498

    
499
  /**
   * Write the header data to the DBF file.
   *
   * @param out A channel to write to. If you have an OutputStream you can
   * obtain the correct channel by using
   * java.nio.Channels.newChannel(OutputStream out).
   *
   * @throws IOException If errors occur.
   */
  public void write(FileChannel out) throws IOException {
    // take care of the annoying case where no records have been added...
    if (myHeaderLength <= 0) {
      myHeaderLength = MINIMUM_HEADER;
    }

    // Write from the beginning of the file.
    out.position(0);

    ByteBuffer buffer = ByteBuffer.allocateDirect(myHeaderLength);
    buffer.order(ByteOrder.LITTLE_ENDIAN);

    // write the output file type.
    buffer.put(MAGIC);

    // write the last-update date (year-1900, month, day)
    Calendar c = Calendar.getInstance();
    c.setTime(new Date());
    buffer.put((byte) (c.get(Calendar.YEAR) % 100));
    buffer.put((byte) (c.get(Calendar.MONTH) + 1));
    buffer.put((byte) (c.get(Calendar.DAY_OF_MONTH)));

    // write the number of records in the datafile.
    buffer.putInt(myNumRecords);

    // write the length of the header structure.
    buffer.putShort((short) myHeaderLength);

    // write the length of a record
    buffer.putShort((short) myRecordLength);

    // skip the reserved bytes in the header
    ((Buffer) buffer).position(((Buffer) buffer).position() + 17);

    // write the language id
    buffer.put((byte) getLanguageID());

    // skip the reserved bytes in the header
    ((Buffer) buffer).position(((Buffer) buffer).position() + 2);

    // write all of the header records
    int tempOffset = 0;

    if (myFieldDescriptions != null) {
      for (int i = 0; i < myFieldDescriptions.length; i++) {
        // write the field name, NUL-padded to the maximum length.
        // NOTE(review): chars above 0xFF are narrowed by the (byte) cast;
        // names are expected to be ISO-8859-1 per HEADER_CHARSET — confirm.
        for (int j = 0; j < DbaseFile.MAX_FIELD_NAME_LENGTH + 1; j++) {
          if (myFieldDescriptions[i].getName().length() > j) {
            buffer.put((byte) myFieldDescriptions[i].getName().charAt(j));
          } else {
            buffer.put((byte) 0);
          }
        }

        // write the field type
        buffer.put((byte) myFieldDescriptions[i].getType());

        // write the field data address, offset from the start of the
        // record.
        buffer.putInt(tempOffset);
        tempOffset += myFieldDescriptions[i].getSize();

        // write the length of the field.
        buffer.put((byte) myFieldDescriptions[i].getSize());

        // write the decimal count.
        buffer.put((byte) myFieldDescriptions[i].getScale());

        // skip the reserved bytes.
        ((Buffer) buffer).position(((Buffer) buffer).position() + 14);
      }
    }
    // write the end of the field definitions marker
    buffer.put((byte) 0x0D);

    ((Buffer) buffer).position(0);

    int r = buffer.remaining();

    // Loop until the whole buffer has been written; a single write() call
    // is not guaranteed to drain it.
    while ((r -= out.write(buffer)) > 0) {
      // do nothing
    }
  }
592

    
593
  /**
   * Returns the charset name derived from this header's language driver id.
   *
   * @return a charset name, "ISO-8859-1" when nothing better is known
   */
  public String getCharsetName() {
    return getCharsetName(getLanguageID());
  }
596

    
597
  /**
   * Returns the charset name for the given language driver id.
   *
   * NOTE(review): when ldid != 0 this overwrites the cached charset field as
   * a side effect — confirm that is intended.
   *
   * @param ldid language driver id; 0 means "not set"
   * @return the mapped charset name, the cached charset, or "ISO-8859-1"
   */
  public String getCharsetName(int ldid) {
    if (ldid != 0) {
      charset = DbaseCodepage.getCharsetName(ldid);
    }
    if (charset != null) {
      // use charset otherwise
      return charset;
    }
    // default
    return "ISO-8859-1";
  }
608

    
609
  /**
   * Returns the charset name derived from the language driver id that was
   * originally read from the file, ignoring any override.
   *
   * @return the original charset name
   */
  public String getOriginalCharset() {
    return getCharsetName(this.origLanguageID);
  }
612

    
613
  public String mappingEncoding(String dbfEnconding) {
614
    if (encodingSupportedByString.contains(dbfEnconding)) {
615
      return dbfEnconding;
616
    } else {
617
      return "UTF-8";
618
    }
619
  }
620

    
621
  private String getUniqueFieldName(String fieldName, List fieldNames) {
622

    
623
    int index = 0;
624
    String tempFieldName = fieldName;
625
    while (fieldNames.contains(tempFieldName) && index < 1000) {
626
      index++;
627
      String sufix = String.valueOf(index);
628
      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH - sufix.length()) + sufix;
629
    }
630
    if (index >= 1000) {
631
      throw new RuntimeException("Can't fix duplicated name for field '" + fieldName + "'.");
632
    }
633
    return tempFieldName;
634
  }
635

    
636
  /**
   * Populates a feature type with one attribute per DBF field, mapping each
   * DBF type/size/scale to the closest DAL data type.
   *
   * @param featureType editable feature type to fill; gains an OID
   * @param handleDatesAsStrings when true, DBF date fields are exposed as
   *        strings instead of dates
   * @return the same featureType instance, populated
   * @throws DataException on duplicated field names or unknown DBF types
   */
  public EditableFeatureType toFeatureType(EditableFeatureType featureType, boolean handleDatesAsStrings) throws DataException {
    featureType.setHasOID(true);

    for (DbaseFieldDescriptor dbfattr : this.myFieldDescriptions) {
      if (featureType.get(dbfattr.getName()) != null) {
        throw new DuplicatedFieldNameException(dbfattr.getName());
      }
      EditableFeatureAttributeDescriptor attr;
      switch (dbfattr.getType()) {
        case DBFTYPE_BOOLEAN:
          attr = featureType.add(dbfattr.getName(), DataTypes.BOOLEAN);
          attr.setSize(0);
          attr.setDefaultValue(null);
          attr.setAllowNull(true);
          break;
        case DBFTYPE_FLOAT:
          if (dbfattr.getScale() > 0) {
            // Fractional values: pick DECIMAL/DOUBLE/FLOAT by field width.
            if (dbfattr.getSize() > DataType.DOUBLE_MAX_PRECISION+3) {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.DECIMAL, dbfattr.getSize());
              attr.setScale(dbfattr.getScale());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);

            } else if (dbfattr.getSize() > DataType.FLOAT_MAX_PRECISION+3) {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.DOUBLE, dbfattr.getSize());
              attr.setScale(dbfattr.getScale());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.FLOAT, dbfattr.getSize());
              attr.setScale(dbfattr.getScale());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            }
          } else {
            // No fractional digits: pick an integral type by precision.
            if (dbfattr.getPrecision() > DataType.LONG_MAX_PRECISION) {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.DECIMAL, dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(dbfattr.getScale());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.INT_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.LONG
              );
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.BYTE_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.INT
              );
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.DECIMAL
              );
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            }
          }
          break;
        case DBFTYPE_NUMBER:
          if (dbfattr.getScale() > 0) {
            attr = featureType.add(dbfattr.getName(),
                    DataTypes.DECIMAL, dbfattr.getSize());
            attr.setPrecision(dbfattr.getPrecision());
            attr.setScale(dbfattr.getScale());
            attr.setDefaultValue(null);
            attr.setAllowNull(true);
          } else {
            // Same integral ladder as the DBFTYPE_FLOAT scale<=0 branch.
            if (dbfattr.getPrecision() > DataType.LONG_MAX_PRECISION) {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.DECIMAL, dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(dbfattr.getScale());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.INT_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.LONG
              );
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.BYTE_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.INT
              );
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.DECIMAL
              );
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            }
          }
          break;
        case DBFTYPE_STRING:
          attr = featureType.add(dbfattr.getName(),
                  DataTypes.STRING, dbfattr.getSize());
          attr.setDefaultValue(null);
          attr.setAllowNull(true);
          // Strings of exactly timestamp/time width get a coercion so dates
          // written into them are formatted consistently.
          if (dbfattr.getSize() == TIMESTAMP_SIZE) {
            try {
              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
              dataType.addCoercion(new CoerceDateToDbfTimestampAsString());
              attr.setDataType(dataType);
            } catch (Exception ex) {
              // Do nothing
            }
          } else if (dbfattr.getSize() == TIME_SIZE) {
            try {
              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
              dataType.addCoercion(new CoerceDateToDbfTimeAsString());
              attr.setDataType(dataType);
            } catch (Exception ex) {
              // Do nothing
            }
          }
          break;
        case DBFTYPE_DATE:
          if (handleDatesAsStrings) {
            attr = featureType.add(dbfattr.getName(),
                    DataTypes.STRING, dbfattr.getSize());
            attr.setDefaultValue(null);
            attr.setAllowNull(true);
          } else {
            attr = featureType.add(dbfattr.getName(),
                    DataTypes.DATE
            );
            attr.setDefaultValue(null);
            attr.setAllowNull(true);
          }
          break;
        default:
          throw new UnknownDataTypeException(
                  dbfattr.getName(), String.valueOf(dbfattr.getType()),
                  DBFStoreProvider.NAME
          );
      }
      attr.setRequiredBytes(dbfattr.getSize());
    }
    return featureType;
  }
807

    
808
  /**
   * Builds a header from a feature type without forcing a charset
   * (delegates to {@code fromFeatureType(featureType, null)}).
   *
   * @param featureType source feature type
   * @return a new header describing the feature type
   * @throws DataException when a column cannot be mapped
   */
  public static DbaseFileHeader fromFeatureType(FeatureType featureType)
          throws DataException {
    return fromFeatureType(featureType, null);
  }
812

    
813
  /**
   * Builds a DBF header from a feature type, adding one column per
   * non-computed attribute and mapping each DAL data type to a DBF type.
   *
   * @param featureType source feature type
   * @param charsetName charset to record in the header; may be null
   * @return a new header describing the feature type
   * @throws DataException when a column name is too long or a type cannot
   *         be mapped
   */
  public static DbaseFileHeader fromFeatureType(FeatureType featureType, String charsetName)
          throws DataException {
    DbaseFileHeader header = new DbaseFileHeader();
    Iterator iterator = featureType.iterator();
    header.myLanguageID = DbaseCodepage.getLdid(charsetName);
    header.charset = charsetName;
    while (iterator.hasNext()) {
      FeatureAttributeDescriptor descriptor = (FeatureAttributeDescriptor) iterator.next();

      // Computed attributes have no backing storage in the DBF.
      if (descriptor.isComputed()) {
        continue;
      }
      int type = descriptor.getType();
      String colName = descriptor.getName();
      if (colName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
        throw new FieldNameTooLongException("DBF file", colName);
      }

      int size = descriptor.getSize();
      int scale = descriptor.getScale();
      int precision = descriptor.getPrecision();
      int requiredBytes = descriptor.getRequiredBytes();
      switch (type) {
        case DataTypes.DECIMAL:
          header.addColumn(colName, DBFTYPE_NUMBER, requiredBytes>0?requiredBytes:precision+3, precision, scale);
          break;
        case DataTypes.DOUBLE:
          header.addColumn(colName, DBFTYPE_FLOAT,
                  requiredBytes>0?requiredBytes:DataType.DOUBLE_MAX_PRECISION+3,
                  precision, scale
          );
          break;
        case DataTypes.FLOAT:
          header.addColumn(colName, DBFTYPE_FLOAT,
                  requiredBytes>0?requiredBytes:DataType.FLOAT_MAX_PRECISION+3,
                  precision, scale
          );
          break;
        case DataTypes.INT:
          header.addColumn(colName, DBFTYPE_NUMBER,
                  requiredBytes>0?requiredBytes:(precision>=DataType.INT_MAX_PRECISION?precision:precision+1),
                  precision, scale
          );
          break;
        case DataTypes.LONG:
          header.addColumn(colName, DBFTYPE_NUMBER,
                  requiredBytes>0?requiredBytes:(precision>=DataType.LONG_MAX_PRECISION?precision:precision+1),
                  precision, scale
          );
          break;
        case DataTypes.DATE:
          header.addColumn(colName, DBFTYPE_DATE, FieldFormatter.DATE_SIZE, 0, 0);
          break;
        case DataTypes.TIME:
          // Times/timestamps are stored as fixed-width strings.
          header.addColumn(colName, DBFTYPE_STRING, FieldFormatter.TIME_SIZE, 0, 0);
          break;
        case DataTypes.TIMESTAMP:
          header.addColumn(colName, DBFTYPE_STRING, FieldFormatter.TIMESTAMP_SIZE, 0, 0);
          break;
        case DataTypes.BOOLEAN:
          header.addColumn(colName, DBFTYPE_BOOLEAN, 1, 0, 0);
          break;
        case DataTypes.STRING:
          // DBF character fields are capped at 254 bytes.
          header.addColumn(colName, DBFTYPE_STRING, Math.min(254, size), 0, 0);
          break;
        case DataTypes.BYTE:
          header.addColumn(colName, DBFTYPE_NUMBER,
                  requiredBytes>0?requiredBytes:(precision>=DataType.BYTE_MAX_PRECISION?precision:precision+1),
                  precision, scale
          );
          break;
        default:
          // If the type is unknown, try to store the value as a string.
          header.addColumn(colName, DBFTYPE_STRING, Math.min(254, size < 10 ? 10 : size), 0, 0);
          break;
      }

    }
    return header;
  }
}