/**
 * gvSIG. Desktop Geographic Information System.
 *
 * Copyright (C) 2007-2013 gvSIG Association.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 3
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA  02110-1301, USA.
 *
 * For any additional information, do not hesitate to contact us
 * at info AT gvsig.com, or visit our website www.gvsig.com.
 */
package org.gvsig.fmap.dal.store.dbf.utils;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import org.apache.commons.lang3.StringUtils;

import org.gvsig.fmap.dal.DataTypes;
import org.gvsig.fmap.dal.exception.DataException;
import org.gvsig.fmap.dal.exception.UnsupportedVersionException;
import org.gvsig.fmap.dal.feature.EditableFeatureAttributeDescriptor;
import org.gvsig.fmap.dal.feature.EditableFeatureType;
import org.gvsig.fmap.dal.feature.FeatureAttributeDescriptor;
import org.gvsig.fmap.dal.feature.FeatureType;
import org.gvsig.fmap.dal.feature.exception.AttributeFeatureTypeNotSuportedException;
import org.gvsig.fmap.dal.feature.exception.UnknownDataTypeException;
import org.gvsig.fmap.dal.store.dbf.DBFStoreProvider;
import org.gvsig.fmap.dal.store.dbf.DuplicatedFieldNameException;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_BOOLEAN;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_DATE;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_FLOAT;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_NUMBER;
import static org.gvsig.fmap.dal.store.dbf.utils.DbaseFieldDescriptor.DBFTYPE_STRING;
import org.gvsig.tools.ToolsLocator;
import org.gvsig.tools.dataTypes.DataType;
import org.gvsig.utils.bigfile.BigByteBuffer2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIMESTAMP_SIZE;
import static org.gvsig.fmap.dal.store.dbf.utils.FieldFormatter.TIME_SIZE;

/**
 * Class to represent the header of a Dbase III file.
 */
@SuppressWarnings("UseSpecificCatch")
public class DbaseFileHeader {

  private static final Logger LOGGER = LoggerFactory.getLogger(DbaseFileHeader.class);

  // Constant for the size of a field descriptor
  private final int FILE_DESCRIPTOR_SIZE = 32;

  // type of the file, must be 03h
  private static final byte MAGIC = 0x03;

  private static final int MINIMUM_HEADER = 33;

  // type of the file, must be 03h
  private int myFileType = 0x03;

  // Date the file was last updated.
  private Date myUpdateDate = new Date();

  // Number of records in the datafile
  private int myNumRecords = 0;

  // Length of the header structure
  private int myHeaderLength;

  /**
   * Length of the records. Set to 1 by default: even if no column has been
   * defined, at least the initial deleted-status byte is taken into account.
   */
  private int myRecordLength = 1;

  // Number of fields in the record.
  private int myNumFields;

  // collection of header records.
  private DbaseFieldDescriptor[] myFieldDescriptions;

  private int myLanguageID = 0x00;
  /**
   * Only considered when myLanguageID = 0x00.
   */
  private String charset = null;

  private List<String> encodingSupportedByString = null;

  private int origLanguageID = 0x00;

  /**
   * Headers must always be encoded using ASCII/ISO-8859-1, regardless of the
   * encoding of the records.
   */
  private static final Charset HEADER_CHARSET = Charset.forName("ISO-8859-1");

  /**
   * DbaseFileHeader constructor.
   */
  public DbaseFileHeader() {
    super();

    encodingSupportedByString = new ArrayList<>();
    SortedMap<String, Charset> m = Charset.availableCharsets();
    Set<String> k = m.keySet();
    Iterator<String> it = k.iterator();
    while (it.hasNext()) {
      encodingSupportedByString.add(it.next());
    }
  }
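
  /*
   * Usage sketch (illustrative only; the field names and sizes are
   * hypothetical, not part of the original source):
   *
   *   DbaseFileHeader header = new DbaseFileHeader();
   *   header.addColumn("ID", DbaseFieldDescriptor.DBFTYPE_NUMBER, 10, 10, 0);
   *   header.addColumn("NAME", DbaseFieldDescriptor.DBFTYPE_STRING, 50, 0, 0);
   *   header.setNumRecords(0);
   *   // header.write(channel); // channel: a FileChannel over the target .dbf
   */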

  /**
   * Add a column to this DbaseFileHeader. The type is one of (C N L or D):
   * character, number, logical (true/false), or date. The field length is the
   * total length in bytes reserved for this column. The decimal count only
   * applies to numbers (N) and floating point values (F), and refers to the
   * number of characters to reserve after the decimal point.
   */
  public void addColumn(String fieldName, char fieldType,
          int fieldSize, int fieldPrecision, int fieldScale)
          throws AttributeFeatureTypeNotSuportedException {
//    if (fieldSize <= 0) {
//      fieldSize = 1;
//    }

    if (myFieldDescriptions == null) {
      myFieldDescriptions = new DbaseFieldDescriptor[0];
    }

    int tempLength = 1; // the length is used for the offset, and there is a * for deleted as the first byte
    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
            + 1];

    for (int i = 0; i < myFieldDescriptions.length; i++) {
      myFieldDescriptions[i].setOffsetInRecord(tempLength);
      tempLength = tempLength + myFieldDescriptions[i].getSize();
      tempFieldDescriptors[i] = myFieldDescriptions[i];
    }

    tempFieldDescriptors[myFieldDescriptions.length] = new DbaseFieldDescriptor();
    tempFieldDescriptors[myFieldDescriptions.length].setType(fieldType);
    tempFieldDescriptors[myFieldDescriptions.length].setSize(fieldSize);
    tempFieldDescriptors[myFieldDescriptions.length].setScale(fieldScale);
    tempFieldDescriptors[myFieldDescriptions.length].setOffsetInRecord(tempLength);

    // set the field name
    String tempFieldName = fieldName;

    if (tempFieldName == null) {
      tempFieldName = "NoName";
    }

    if (tempFieldName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {
      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH);
      warn("FieldName " + fieldName
              + " is longer than " + DbaseFile.MAX_FIELD_NAME_LENGTH + " characters, truncating to "
              + tempFieldName);
    }

    tempFieldDescriptors[myFieldDescriptions.length].setName(tempFieldName);
//    tempFieldDescriptors[myFieldDescriptions.length].setName_trim(tempFieldName.trim());
    tempFieldDescriptors[myFieldDescriptions.length].setName_trim(StringUtils.stripEnd(tempFieldName, null));

    // the length of a record
    tempLength += tempFieldDescriptors[myFieldDescriptions.length].getSize();

    // set the new fields.
    myFieldDescriptions = tempFieldDescriptors;
    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
    myNumFields = myFieldDescriptions.length;
    myRecordLength = tempLength;
  }
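
  /*
   * Worked example of the bookkeeping above: after adding a 10-byte and a
   * 50-byte column, myRecordLength is 1 + 10 + 50 = 61 (the leading byte is
   * the deleted-record flag) and myHeaderLength is 33 + (32 * 2) = 97.
   */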

  /**
   * Remove a column from this DbaseFileHeader.
   *
   * @return index of the removed column, -1 if not found
   */
  public int removeColumn(String inFieldName) {
    int retCol = -1;
    int tempLength = 1;
    DbaseFieldDescriptor[] tempFieldDescriptors = new DbaseFieldDescriptor[myFieldDescriptions.length
            - 1];

    for (int i = 0, j = 0; i < myFieldDescriptions.length; i++) {
      if (!inFieldName.equalsIgnoreCase(
              myFieldDescriptions[i].getName().trim())) {
        // if this is the last field and we still haven't found the
        // named field
        if ((i == j) && (i == (myFieldDescriptions.length - 1))) {
//                    System.err.println("Could not find a field named '" +
//                        inFieldName + "' for removal");

          return retCol;
        }

        tempFieldDescriptors[j] = myFieldDescriptions[i];
        tempFieldDescriptors[j].setOffsetInRecord(tempLength);
        tempLength += tempFieldDescriptors[j].getSize();

        // only increment j on non-matching fields
        j++;
      } else {
        retCol = i;
      }
    }

    // set the new fields.
    myFieldDescriptions = tempFieldDescriptors;
    myHeaderLength = 33 + (32 * myFieldDescriptions.length);
    myNumFields = myFieldDescriptions.length;
    myRecordLength = tempLength;

    return retCol;
  }
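
  /*
   * For instance, removing a 10-byte column from a three-column header shrinks
   * myHeaderLength by 32 (one field descriptor) and myRecordLength by 10, and
   * the offsets of the remaining columns are recomputed.
   */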

  private void warn(String inWarn) {
    LOGGER.warn(inWarn);
  }

  /**
   * Return the Field Descriptor for the given field.
   *
   * @param index the index of the requested field description
   * @return the dbase field descriptor.
   */
  public DbaseFieldDescriptor getFieldDescription(int index) {
    return myFieldDescriptions[index];
  }

  public DbaseFieldDescriptor getFieldDescription(String name) {
    int index = this.getFieldIndex(name);
    return myFieldDescriptions[index];
  }

  public int getFieldIndex(String name) {
    for (int i = 0; i < myFieldDescriptions.length; i++) {
      if (myFieldDescriptions[i].getName_trim()
              .equalsIgnoreCase(StringUtils.stripEnd(name, null))) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Return the date this file was last updated.
   */
  public Date getLastUpdateDate() {
    return myUpdateDate;
  }

  /**
   * Return the number of fields in the records.
   */
  public int getNumFields() {
    return myNumFields;
  }

  /**
   * Return the number of records in the file.
   */
  public int getNumRecords() {
    return myNumRecords;
  }

  /**
   * Return the length of the records in bytes.
   */
  public int getRecordLength() {
    return myRecordLength;
  }

  /**
   * Return the length of the header.
   */
  public int getHeaderLength() {
    return myHeaderLength;
  }

  /**
   * Read the header data from the DBF file.
   */
  public void read(BigByteBuffer2 in, String charsName, boolean allowDuplicatedFieldNames)
          throws UnsupportedVersionException,
            AttributeFeatureTypeNotSuportedException {
    // type of file.
    myFileType = in.get();

    if (myFileType != 0x03) {
      throw new UnsupportedVersionException("DBF", Integer.toHexString(myFileType));
    }

    // parse the update date information.
    int tempUpdateYear = in.get();
    int tempUpdateMonth = in.get();
    int tempUpdateDay = in.get();
    tempUpdateYear = tempUpdateYear + 1900;

    Calendar c = Calendar.getInstance();
    c.set(Calendar.YEAR, tempUpdateYear);
    c.set(Calendar.MONTH, tempUpdateMonth - 1);
    c.set(Calendar.DATE, tempUpdateDay);
    myUpdateDate = c.getTime();

    // read the number of records.
    in.order(ByteOrder.LITTLE_ENDIAN);
    myNumRecords = in.getInt();

    // read the length of the header structure.
    myHeaderLength = in.getShort();

    // read the length of a record (header offset 0Ah)
    myRecordLength = in.getShort();

    in.order(ByteOrder.BIG_ENDIAN);

    // read the language driver id (LDID)
    in.position(29);
    origLanguageID = byteAsUnsigned(in.get());
    if (charsName != null) {
      // ignore the language driver id, use the provided charset name
      myLanguageID = DbaseCodepage.getLdid(charsName);
      this.charset = charsName;
    } else {
      // use the language driver id read from the file
      myLanguageID = origLanguageID;
      charsName = getCharsetName();
    }

    // Position the buffer at the start of the field descriptors.
    in.position(32);

    // calculate the number of Fields in the header
    myNumFields = (myHeaderLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;

    // read all of the header records
    myFieldDescriptions = new DbaseFieldDescriptor[myNumFields];
    int fieldOffset = 0;

    List<String> fieldNames = new ArrayList<>();

    // FIXME: should field names always be read using ISO-8859-1??
    for (int i = 0; i < myNumFields; i++) {
      myFieldDescriptions[i] = new DbaseFieldDescriptor();

      // read the field name
      byte[] buffer = new byte[11];
      in.get(buffer);
      String fieldName;
      fieldName = new String(buffer, HEADER_CHARSET);

      if (allowDuplicatedFieldNames) {
        fieldName = getUniqueFieldName(fieldName, fieldNames);
      }
      fieldNames.add(fieldName);

      myFieldDescriptions[i].setName(fieldName);

      myFieldDescriptions[i].setName_trim(myFieldDescriptions[i].getName().trim());

      // read the field type
      myFieldDescriptions[i].setType((char) in.get());

      // read the field data address, offset from the start of the record.
      myFieldDescriptions[i].setOffsetInRecord(in.getInt());

      // read the field length in bytes
      int tempLength = in.get();

      if (tempLength < 0) {
        tempLength = tempLength + 256;
      }

      myFieldDescriptions[i].setSize(tempLength);

      // read the field decimal count
      myFieldDescriptions[i].setScale(in.get());

      myFieldDescriptions[i].setOffsetInRecord(fieldOffset);

      fieldOffset += tempLength;

      // skip the reserved bytes.
      in.position(in.position() + 14);

    }

    // Last byte is a marker for the end of the field definitions.
    in.get();
  }
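
  /*
   * Header layout consumed by read(), for reference: byte 0 holds the file
   * type (03h), bytes 1-3 the last update date (YY MM DD), bytes 4-7 the
   * record count and bytes 8-9/10-11 the header and record lengths (all
   * little-endian), byte 29 the language driver id (LDID), and from byte 32
   * onwards one 32-byte descriptor per field, followed by a one-byte
   * field-definitions terminator (written as 0Dh by write()).
   */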

  /**
   * Set the number of records in the file.
   */
  public void setNumRecords(int inNumRecords) {
    myNumRecords = inNumRecords;
  }

  /**
   * Returns the value of the unsigned byte as an int. Bytes are always signed
   * in Java, so if we are reading a C unsigned byte with value &gt; 128, it
   * will appear as a negative value.
   *
   * In this case, we need to recover the original unsigned value and return it
   * as a short or int, as a Java byte can never store it correctly.
   */
  private int byteAsUnsigned(byte b) {
    int i;
    if (b < 0) {
      i = b & 0xFF;
    } else {
      i = b;
    }
    return i;
  }
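
  // For instance, byteAsUnsigned((byte) 0xC8) returns 200, the LDID of the
  // Windows EE (code page 1250) language driver.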

  /**
   * Gets the language driver id (LDID, code page) defined in the file header
   * (or guessed from the provided charset).
   *
   * Some examples:
   *   01h  DOS USA                     code page 437
   *   02h  DOS Multilingual            code page 850
   *   03h  Windows ANSI                code page 1252
   *   04h  Standard Macintosh
   *   64h  EE MS-DOS                   code page 852
   *   65h  Nordic MS-DOS               code page 865
   *   66h  Russian MS-DOS              code page 866
   *   67h  Icelandic MS-DOS
   *   68h  Kamenicky (Czech) MS-DOS
   *   69h  Mazovia (Polish) MS-DOS
   *   6Ah  Greek MS-DOS (437G)
   *   6Bh  Turkish MS-DOS
   *   96h  Russian Macintosh
   *   97h  Eastern European Macintosh
   *   98h  Greek Macintosh
   *   C8h  Windows EE                  code page 1250
   *   C9h  Russian Windows
   *   CAh  Turkish Windows
   *   CBh  Greek Windows
   *
   * See the java equivalences in
   * {@link DbaseCodepage#dbfLdid} and {@link DbaseCodepage#ldidJava} objects.
   *
   * See some others here:
   * https://github.com/infused/dbf/blob/master/docs/supported_encodings.csv
   */
  public int getLanguageID() {

    return myLanguageID;
  }

  /**
   * Write the header data to the DBF file.
   *
   * @param out A channel to write to. If you have an OutputStream you can
   * obtain the correct channel by using
   * java.nio.channels.Channels.newChannel(OutputStream out).
   */
  public void write(FileChannel out) throws IOException {
    // take care of the annoying case where no records have been added...
    if (myHeaderLength <= 0) {
      myHeaderLength = MINIMUM_HEADER;
    }

    ByteBuffer buffer = ByteBuffer.allocateDirect(myHeaderLength);
    buffer.order(ByteOrder.LITTLE_ENDIAN);

    // write the output file type.
    buffer.put(MAGIC);

    // write the last update date
    Calendar c = Calendar.getInstance();
    c.setTime(new Date());
    buffer.put((byte) (c.get(Calendar.YEAR) % 100));
    buffer.put((byte) (c.get(Calendar.MONTH) + 1));
    buffer.put((byte) (c.get(Calendar.DAY_OF_MONTH)));

    // write the number of records in the datafile.
    buffer.putInt(myNumRecords);

    // write the length of the header structure.
    buffer.putShort((short) myHeaderLength);

    // write the length of a record
    buffer.putShort((short) myRecordLength);

    // skip the reserved bytes in the header
    ((Buffer) buffer).position(((Buffer) buffer).position() + 17);

    // write the language id
    buffer.put((byte) getLanguageID());

    // skip the reserved bytes in the header
    ((Buffer) buffer).position(((Buffer) buffer).position() + 2);

    // write all of the header records
    int tempOffset = 0;

    if (myFieldDescriptions != null) {
      for (int i = 0; i < myFieldDescriptions.length; i++) {
        // write the field name
        for (int j = 0; j < DbaseFile.MAX_FIELD_NAME_LENGTH + 1; j++) {
          if (myFieldDescriptions[i].getName().length() > j) {
            buffer.put((byte) myFieldDescriptions[i].getName().charAt(j));
          } else {
            buffer.put((byte) 0);
          }
        }

        // write the field type
        buffer.put((byte) myFieldDescriptions[i].getType());

        // write the field data address, offset from the start of the record.
        buffer.putInt(tempOffset);
        tempOffset += myFieldDescriptions[i].getSize();

        // write the length of the field.
        buffer.put((byte) myFieldDescriptions[i].getSize());

        // write the decimal count.
        buffer.put((byte) myFieldDescriptions[i].getScale());

        // skip the reserved bytes.
        // for (in j=0; jj<14; j++) out.writeByteLE(0);
        ((Buffer) buffer).position(((Buffer) buffer).position() + 14);
      }
    }
    // write the end of the field definitions marker
    buffer.put((byte) 0x0D);

    ((Buffer) buffer).position(0);

    int r = buffer.remaining();

    // Start writing from the beginning of the file.
    out.position(0);

    while ((r -= out.write(buffer)) > 0) {
      // do nothing
    }
  }
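
  /*
   * Usage sketch (illustrative; the file name is hypothetical):
   *
   *   try (FileOutputStream fos = new FileOutputStream("example.dbf")) {
   *     header.write(fos.getChannel());
   *   }
   */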

  public String getCharsetName() {
    return getCharsetName(getLanguageID());
  }

  public String getCharsetName(int ldid) {
    if (ldid != 0) {
      charset = DbaseCodepage.getCharsetName(ldid);
    }
    if (charset != null) {
      // otherwise use the stored charset
      return charset;
    }
    // default
    return "ISO-8859-1";
  }

  public String getOriginalCharset() {
    return getCharsetName(this.origLanguageID);
  }

  public String mappingEncoding(String dbfEncoding) {
    if (encodingSupportedByString.contains(dbfEncoding)) {
      return dbfEncoding;
    } else {
      return "UTF-8";
    }
  }

  private String getUniqueFieldName(String fieldName, List<String> fieldNames) {

    int index = 0;
    String tempFieldName = fieldName;
    while (fieldNames.contains(tempFieldName) && index < 1000) {
      index++;
      String suffix = String.valueOf(index);
      tempFieldName = tempFieldName.substring(0, DbaseFile.MAX_FIELD_NAME_LENGTH - suffix.length()) + suffix;
    }
    if (index >= 1000) {
      throw new RuntimeException("Can't fix duplicated name for field '" + fieldName + "'.");
    }
    return tempFieldName;
  }

  public EditableFeatureType toFeatureType(EditableFeatureType featureType, boolean handleDatesAsStrings) throws DataException {
    featureType.setHasOID(true);

    for (DbaseFieldDescriptor dbfattr : this.myFieldDescriptions) {
      if (featureType.get(dbfattr.getName()) != null) {
        throw new DuplicatedFieldNameException(dbfattr.getName());
      }
      EditableFeatureAttributeDescriptor attr;
      switch (dbfattr.getType()) {
        case DBFTYPE_BOOLEAN:
          attr = featureType.add(dbfattr.getName(), DataTypes.BOOLEAN);
          attr.setSize(0);
          attr.setDefaultValue(null);
          attr.setAllowNull(true);
          break;
        case DBFTYPE_FLOAT:
          if (dbfattr.getScale() == -1 || dbfattr.getScale() > 0) {
            if (dbfattr.getSize() > DataType.DOUBLE_MAX_PRECISION + 3) {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.DECIMAL);
              attr.setDisplaySize(dbfattr.getSize());
              attr.setScale(DataType.SCALE_NONE);
              attr.setPrecision(dbfattr.getPrecision());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);

            } else if (dbfattr.getSize() > DataType.FLOAT_MAX_PRECISION + 3) {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.DOUBLE);
              attr.setDisplaySize(dbfattr.getSize());
              attr.setScale(dbfattr.getScale());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.FLOAT);
              attr.setDisplaySize(dbfattr.getSize());
              attr.setScale(dbfattr.getScale());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            }
          } else {
            if (dbfattr.getPrecision() > DataType.LONG_MAX_PRECISION) {
              attr = featureType.add(dbfattr.getName(), DataTypes.DECIMAL);
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(DataType.SCALE_NONE);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.INT_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.LONG
              );
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.BYTE_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.INT
              );
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.DECIMAL
              );
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            }
          }
          break;
        case DBFTYPE_NUMBER:
          if (dbfattr.getScale() == -1 || dbfattr.getScale() > 0) {
            attr = featureType.add(dbfattr.getName(),
                    DataTypes.DECIMAL);
            attr.setDisplaySize(dbfattr.getSize());
            attr.setPrecision(dbfattr.getPrecision());
            attr.setScale(DataType.SCALE_NONE);
            attr.setDefaultValue(null);
            attr.setAllowNull(true);
          } else {
            if (dbfattr.getPrecision() > DataType.LONG_MAX_PRECISION) {
              attr = featureType.add(dbfattr.getName(),
                      DataTypes.DECIMAL);
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(DataType.SCALE_NONE);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.INT_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.LONG
              );
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else if (dbfattr.getPrecision() > DataType.BYTE_MAX_PRECISION) {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.INT
              );
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            } else {
              attr = featureType.add(
                      dbfattr.getName(),
                      DataTypes.DECIMAL
              );
              attr.setDisplaySize(dbfattr.getSize());
              attr.setPrecision(dbfattr.getPrecision());
              attr.setScale(0);
              attr.setDefaultValue(null);
              attr.setAllowNull(true);
            }
          }
          break;
        case DBFTYPE_STRING:
          attr = featureType.add(dbfattr.getName(),
                  DataTypes.STRING, dbfattr.getSize());
          attr.setDefaultValue(null);
          attr.setAllowNull(true);
          if (dbfattr.getSize() == TIMESTAMP_SIZE) {
            try {
              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
              dataType.addCoercion(new CoerceDateToDbfTimestampAsString());
              attr.setDataType(dataType);
            } catch (Exception ex) {
              // Do nothing
            }
          } else if (dbfattr.getSize() == TIME_SIZE) {
            try {
              DataType dataType = ToolsLocator.getDataTypesManager().get(DataTypes.STRING).clone();
              dataType.addCoercion(new CoerceDateToDbfTimeAsString());
              attr.setDataType(dataType);
            } catch (Exception ex) {
              // Do nothing
            }
          }
          break;
        case DBFTYPE_DATE:
          if (handleDatesAsStrings) {
            attr = featureType.add(dbfattr.getName(),
                    DataTypes.STRING, dbfattr.getSize());
            attr.setDefaultValue(null);
            attr.setAllowNull(true);
          } else {
            attr = featureType.add(dbfattr.getName(),
                    DataTypes.DATE
            );
            attr.setDefaultValue(null);
            attr.setAllowNull(true);
          }
          break;
        default:
          throw new UnknownDataTypeException(
                  dbfattr.getName(), String.valueOf(dbfattr.getType()),
                  DBFStoreProvider.NAME
          );
      }
      attr.setDisplaySize(dbfattr.getSize());
    }
    return featureType;
  }

    
785
  public static DbaseFileHeader fromFeatureType(FeatureType featureType)
786
          throws DataException {
787
    return fromFeatureType(featureType, null);
788
  }
789

    
790
  public static DbaseFileHeader fromFeatureType(FeatureType featureType, String charsetName)
791
          throws DataException {
792
    DbaseFileHeader header = new DbaseFileHeader();
793
    Iterator iterator = featureType.iterator();
794
    header.myLanguageID = DbaseCodepage.getLdid(charsetName);
795
    header.charset = charsetName;
796
    while (iterator.hasNext()) {
797
      FeatureAttributeDescriptor descriptor = (FeatureAttributeDescriptor) iterator.next();
798

    
799
      if (descriptor.isComputed()) {
800
        continue;
801
      }
802
      int type = descriptor.getType();
803
      String colName = descriptor.getName();
804
      if (colName.length() > DbaseFile.MAX_FIELD_NAME_LENGTH) {      
805
        throw new FieldNameTooLongException("DBF file", colName);        
806
      }
807

    
808
      int size = descriptor.getSize();
809
      int scale = descriptor.getScale();
810
      int precision = descriptor.getPrecision();
811
      int displaySize = descriptor.getDisplaySize();
812
      switch (type) {
813
        case DataTypes.DECIMAL:
814
          if( precision < 0) {
815
            if( displaySize < 1){
816
                displaySize = DbaseFieldDescriptor.MAX_NUMBER_SIZE;
817
            }
818
          } else {
819
            displaySize = Math.max(displaySize, precision + 3);
820
          }
821
          header.addColumn(colName, DBFTYPE_NUMBER, displaySize, precision, scale);
822
          break;
823
        case DataTypes.DOUBLE:
824
          if( displaySize < 1 ) {
825
            displaySize = DataType.DOUBLE_MAX_PRECISION+3;
826
          }
827
          if( displaySize>DbaseFieldDescriptor.MAX_NUMBER_SIZE ) {
828
            displaySize = DbaseFieldDescriptor.MAX_NUMBER_SIZE;
829
          }
830
          header.addColumn(colName, DBFTYPE_FLOAT, displaySize, precision, scale);
831
          break;
832
        case DataTypes.FLOAT:
833
          if( displaySize < 1 ) {
834
            displaySize = DataType.FLOAT_MAX_PRECISION+3;
835
          }
836
          if( displaySize>DbaseFieldDescriptor.MAX_NUMBER_SIZE ) {
837
            displaySize = DbaseFieldDescriptor.MAX_NUMBER_SIZE;
838
          }
839
          header.addColumn(colName, DBFTYPE_FLOAT, displaySize, precision, scale);
840
          break;
841
        case DataTypes.INT:
842
          displaySize = Math.max(displaySize, precision + 1);
843
          if( displaySize >= DataType.INT_MAX_PRECISION ) {
844
            displaySize = DataType.INT_MAX_PRECISION;
845
          }
846
          if( displaySize>DbaseFieldDescriptor.MAX_NUMBER_SIZE ) {
847
            displaySize = DbaseFieldDescriptor.MAX_NUMBER_SIZE;
848
          }
849
          header.addColumn(colName, DBFTYPE_NUMBER, displaySize, precision, scale);
850
          break;
851
        case DataTypes.LONG:
852
          displaySize = Math.max(displaySize, precision + 1);
853
          if( displaySize >= DataType.LONG_MAX_PRECISION ) {
854
            displaySize = DataType.LONG_MAX_PRECISION;
855
          }
856
          if( displaySize>DbaseFieldDescriptor.MAX_NUMBER_SIZE ) {
857
            displaySize = DbaseFieldDescriptor.MAX_NUMBER_SIZE;
858
          }
859
          header.addColumn(colName, DBFTYPE_NUMBER, displaySize, precision, scale);
860
          break;
861
        case DataTypes.DATE:
862
          header.addColumn(colName, DBFTYPE_DATE, FieldFormatter.DATE_SIZE, 0, 0);
863
          break;
864
        case DataTypes.TIME:
865
          header.addColumn(colName, DBFTYPE_STRING, FieldFormatter.TIME_SIZE, 0, 0);
866
          break;
867
        case DataTypes.TIMESTAMP:
868
          header.addColumn(colName, DBFTYPE_STRING, FieldFormatter.TIMESTAMP_SIZE, 0, 0);
869
          break;
870
        case DataTypes.BOOLEAN:
871
          header.addColumn(colName, DBFTYPE_BOOLEAN, 1, 0, 0);
872
          break;
873
        case DataTypes.STRING:
874
          displaySize = Math.max(displaySize, size);
875
          if( displaySize>DbaseFieldDescriptor.MAX_SIZE ) {
876
            displaySize = DbaseFieldDescriptor.MAX_SIZE;
877
          }
878
          header.addColumn(colName, DBFTYPE_STRING, displaySize, 0, 0);
879
          break;
880
        case DataTypes.BYTE:
881
          displaySize = Math.max(displaySize, precision + 1);
882
          if( displaySize >= DataType.BYTE_MAX_PRECISION ) {
883
            displaySize = DataType.BYTE_MAX_PRECISION;
884
          }
885
          if( displaySize>DbaseFieldDescriptor.MAX_NUMBER_SIZE ) {
886
            displaySize = DbaseFieldDescriptor.MAX_NUMBER_SIZE;
887
          }
888
          header.addColumn(colName, DBFTYPE_NUMBER,displaySize, precision, scale);
889
          break;
890
        default:
891
          // Si no sabemos lo que es intentaremos guardarlo como un string
892
          displaySize = Math.max(displaySize, Math.max(size, 20));
893
          if( displaySize>DbaseFieldDescriptor.MAX_SIZE ) {
894
            displaySize = DbaseFieldDescriptor.MAX_SIZE;
895
          }
896
          header.addColumn(colName, DBFTYPE_STRING, displaySize, 0, 0);
897
          break;
898
      }
899
    }
900
    return header;
901
  }
902
}