// Gff.java
// <editor-fold defaultstate="collapsed" desc="license">
/*
* Copyright (c) 2014, Karl H. Beckers
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the <ORGANIZATION> nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
**/
// </editor-fold>
package net.jarre_de_the.griffin.file;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import net.jarre_de_the.griffin.Util;
import net.jarre_de_the.griffin.types.data.DWordData;
import net.jarre_de_the.griffin.types.data.StructData;
import net.jarre_de_the.griffin.types.field.AbstractField;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* GFF File
* <p>
* @author charly4711
*/
public class Gff extends PersistableNwnFile implements EmbeddableNwnFile {

    /** Default 4-character file type signature ("GFF " — padded to 4 bytes). */
    public static final String DEFAULT_FILE_TYPE = ContentType.GFF.name().toUpperCase() + " ";
    /** Default 4-character file version signature. */
    public static final String DEFAULT_FILE_VERSION = "V3.2";

    private static final Logger LOGGER = LoggerFactory.getLogger(Gff.class);

    /**
     * The single top-level struct. Every other struct and field in a GFF
     * file is reachable from here.
     */
    private StructData rootStruct;
    /** Offset of this file's payload when embedded in a container file. */
    private long embeddedOffset = 0;

    /**
     * Creates an empty GFF with the default type ("GFF ") and version
     * ("V3.2") header fields and no root struct.
     */
    public Gff() {
        setFileType(DEFAULT_FILE_TYPE);
        setFileVersion(DEFAULT_FILE_VERSION);
    }

    /**
     * Constructor to read a GFF file from disk.
     * <p>
     * @param in File on disk.
     * @throws IOException if the file cannot be opened or parsed.
     */
    public Gff(File in)
            throws IOException {
        parseFile(in);
    }

    /**
     * Opens {@code inFile} read-only and parses it; the file handle is
     * always closed via try-with-resources.
     */
    private void parseFile(File inFile)
            throws IOException {
        try (RandomAccessFile in = new RandomAccessFile(inFile, "r")) {
            parse(in);
        }
    }

    /**
     * Reads the GFF header (one DWORD per {@link GffHeaderField}, in enum
     * order) followed by the root struct from the current file position.
     */
    @Override
    public void parse(RandomAccessFile in) throws IOException {
        // set the header fields
        for (GffHeaderField f : GffHeaderField.values()) {
            DWordData hf = DWordData.read(in);
            getHeaderFields().put(f.field(), hf);
        }
        // remember that all structs beside the root struct are referenced
        // FROM the root struct
        this.rootStruct = StructData.read(in, this);
    }

    /**
     * All fields contained in a GFF file are wrapped in a single struct.
     * Struct objects implement Container and (in contrast to other objects
     * in the data package) are not immutable.
     * <p>
     * @return The root struct (or null if there is none.)
     */
    public StructData getRootStruct() {
        return rootStruct;
    }

    public void setRootStruct(StructData rootStruct) {
        this.rootStruct = rootStruct;
    }

    // ---- header accessors -------------------------------------------------
    // Getters read the parsed header; the private setters are only used by
    // clone() since all offsets/counts are recomputed in persist().

    public long getFieldCount() {
        return getHeaderNumber(GffHeaderField.FIELD_COUNT.field(), null);
    }

    private void setFieldCount(long fieldCount) {
        getHeaderFields().put(GffHeaderField.FIELD_COUNT.field(),
                new DWordData(fieldCount));
    }

    public long getFieldDataCount() {
        return getHeaderNumber(GffHeaderField.FIELD_DATA_COUNT.field(), null);
    }

    private void setFieldDataCount(long fieldDataCount) {
        getHeaderFields().put(GffHeaderField.FIELD_DATA_COUNT.field(),
                new DWordData(fieldDataCount));
    }

    public long getFieldDataOff() {
        return getHeaderNumber(GffHeaderField.FIELD_DATA_OFFSET.field(), null);
    }

    private void setFieldDataOff(long fieldDataOff) {
        getHeaderFields().put(GffHeaderField.FIELD_DATA_OFFSET.field(),
                new DWordData(fieldDataOff));
    }

    public long getFieldIdxCount() {
        return getHeaderNumber(GffHeaderField.FIELD_INDEX_COUNT.field(), null);
    }

    private void setFieldIdxCount(long fieldIdxCount) {
        getHeaderFields().put(GffHeaderField.FIELD_INDEX_COUNT.field(),
                new DWordData(fieldIdxCount));
    }

    public long getFieldIdxOff() {
        return getHeaderNumber(GffHeaderField.FIELD_INDEX_OFFSET.field(), null);
    }

    private void setFieldIdxOff(long fieldIdxOff) {
        getHeaderFields().put(GffHeaderField.FIELD_INDEX_OFFSET.field(),
                new DWordData(fieldIdxOff));
    }

    public long getFieldOff() {
        return getHeaderNumber(GffHeaderField.FIELD_OFFSET.field(), null);
    }

    private void setFieldOff(long fieldOff) {
        getHeaderFields().put(GffHeaderField.FIELD_OFFSET.field(),
                new DWordData(fieldOff));
    }

    public long getLabelCount() {
        return getHeaderNumber(GffHeaderField.LABEL_COUNT.field(), null);
    }

    private void setLabelCount(long labelCount) {
        getHeaderFields().put(GffHeaderField.LABEL_COUNT.field(),
                new DWordData(labelCount));
    }

    public long getLabelOff() {
        return getHeaderNumber(GffHeaderField.LABEL_OFFSET.field(), null);
    }

    private void setLabelOff(long labelOff) {
        getHeaderFields().put(GffHeaderField.LABEL_OFFSET.field(),
                new DWordData(labelOff));
    }

    public long getListIdxCount() {
        return getHeaderNumber(GffHeaderField.LIST_INDEX_COUNT.field(), null);
    }

    private void setListIdxCount(long listIdxCount) {
        getHeaderFields().put(GffHeaderField.LIST_INDEX_COUNT.field(),
                new DWordData(listIdxCount));
    }

    public long getListIdxOff() {
        return getHeaderNumber(GffHeaderField.LIST_INDEX_OFFSET.field(), null);
    }

    private void setListIdxOff(long listIdxOff) {
        getHeaderFields().put(GffHeaderField.LIST_INDEX_OFFSET.field(),
                new DWordData(listIdxOff));
    }

    public long getStructCount() {
        return getHeaderNumber(GffHeaderField.STRUCT_COUNT.field(), null);
    }

    private void setStructCount(long structCount) {
        getHeaderFields().put(GffHeaderField.STRUCT_COUNT.field(),
                new DWordData(structCount));
    }

    public long getStructOff() {
        return getHeaderNumber(GffHeaderField.STRUCT_OFFSET.field(), null);
    }

    private void setStructOff(long structOff) {
        getHeaderFields().put(GffHeaderField.STRUCT_OFFSET.field(),
                new DWordData(structOff));
    }

    // ---- EmbeddableNwnFile ------------------------------------------------

    @Override
    public long getEmbeddedOffset() {
        return embeddedOffset;
    }

    @Override
    public void setEmbeddedOffset(long offsetFromStartOfFile) {
        this.embeddedOffset = offsetFromStartOfFile;
    }

    /**
     * Unsupported: a GFF's length is derived from its content, never stored.
     *
     * @throws UnsupportedOperationException always.
     */
    @Override
    public long getEmbeddedLength() {
        throw new UnsupportedOperationException("GFF files should not rely on the embedded length");
    }

    /**
     * Unsupported: a GFF's length is derived from its content, never stored.
     *
     * @throws UnsupportedOperationException always.
     */
    @Override
    public void setEmbeddedLength(long length) {
        throw new UnsupportedOperationException("GFF files should not rely on the embedded length");
    }

    /**
     * Deep clone: copies the header fields not handled by
     * {@code PersistableNwnFile} and clones the root struct (which is
     * mutable) when present.
     */
    @Override
    public Gff clone() throws CloneNotSupportedException {
        Gff clone = (Gff) super.clone();
        // header fields not in PersistableNwnFile
        clone.setStructOff(getStructOff());
        clone.setStructCount(getStructCount());
        clone.setFieldOff(getFieldOff());
        clone.setFieldCount(getFieldCount());
        clone.setLabelOff(getLabelOff());
        clone.setLabelCount(getLabelCount());
        clone.setFieldDataOff(getFieldDataOff());
        clone.setFieldDataCount(getFieldDataCount());
        clone.setFieldIdxOff(getFieldIdxOff());
        clone.setFieldIdxCount(getFieldIdxCount());
        clone.setListIdxOff(getListIdxOff());
        clone.setListIdxCount(getListIdxCount());
        // a freshly constructed Gff has no root struct yet; guard the NPE
        StructData root = getRootStruct();
        clone.setRootStruct(root == null ? null : root.clone());
        clone.setEmbeddedOffset(getEmbeddedOffset());
        return clone;
    }

    /**
     * Two Gff instances are equal when their content matches. Offsets and
     * section sizes are deliberately NOT compared: they are recomputed on
     * every {@link #persist()} and may legitimately differ between a parsed
     * file and an in-memory copy with identical content.
     */
    @Override
    public boolean equals(Object compare) {
        if (compare == this) {
            return true;
        }
        if (!super.equals(compare)) {
            return false;
        }
        if (!(compare instanceof Gff)) {
            return false;
        }
        Gff file = (Gff) compare;
        return areNullablePropertiesEqual(this.getRootStruct(), file.getRootStruct())
                && this.getFieldCount() == file.getFieldCount()
                && this.getLabelCount() == file.getLabelCount()
                && this.getStructCount() == file.getStructCount();
    }

    /**
     * Consistent with {@link #equals(Object)}: only state that participates
     * in equality is hashed. (The embedded offset must not be mixed in —
     * equals() ignores it, and hashing it would let two equal instances
     * produce different hash codes, breaking the Object contract.)
     */
    @Override
    public int hashCode() {
        int hash = super.hashCode();
        hash = 89 * hash + Objects.hashCode(this.rootStruct);
        return hash;
    }

    /**
     * The fourteen DWORD header fields of a GFF V3.2 file, declared in the
     * exact order they appear on disk (parse/persist iterate the enum).
     */
    protected enum GffHeaderField {
        FILE_TYPE(CommonHeaderField.FILE_TYPE.field()),
        FILE_VERSION(CommonHeaderField.FILE_VERSION.field()),
        STRUCT_OFFSET("structOff"),
        STRUCT_COUNT("structCount"),
        FIELD_OFFSET("fieldOff"),
        FIELD_COUNT("fieldCount"),
        LABEL_OFFSET("labelOff"),
        LABEL_COUNT("labelCount"),
        FIELD_DATA_OFFSET("fieldDataOff"),
        FIELD_DATA_COUNT("fieldDataCount"),
        FIELD_INDEX_OFFSET("fieldIdxOff"),
        FIELD_INDEX_COUNT("fieldIdxCount"),
        LIST_INDEX_OFFSET("listIdxOff"),
        LIST_INDEX_COUNT("listIdxCount");

        private final String field;

        private GffHeaderField(String field) {
            this.field = field;
        }

        /** @return the map key used in {@code getHeaderFields()}. */
        public String field() {
            return field;
        }
    }

    /** Renders a 4-byte header signature as text for debug logging. */
    private static String signatureToString(DWordData hf) {
        StringBuilder sb = new StringBuilder(4);
        for (byte b : hf.getValueAsByteArray()) {
            sb.append((char) b);
        }
        return sb.toString();
    }

    /**
     * Serializes this GFF to its on-disk byte layout: header, struct array,
     * field array, label array, field data block, field index array and
     * list index array. All offsets and counts in the header are recomputed
     * from the freshly written sections.
     * <p>
     * @return the complete file content.
     * @throws IOException never in practice (in-memory streams), declared
     *         because the stream API does.
     */
    @Override
    public byte[] persist()
            throws IOException {
        ByteArrayOutputStream structArray = new ByteArrayOutputStream();
        ByteArrayOutputStream fieldArray = new ByteArrayOutputStream();
        List<AbstractField.LabelWrapper> labelArrayList = new ArrayList<>();
        ByteArrayOutputStream labelArray = new ByteArrayOutputStream();
        ByteArrayOutputStream fieldIndicesArray = new ByteArrayOutputStream();
        ByteArrayOutputStream listIndicesArray = new ByteArrayOutputStream();
        ByteArrayOutputStream fieldData = new ByteArrayOutputStream();
        DWordData[] header = new DWordData[GffHeaderField.values().length];

        // recursively persist
        // handle the root struct data explicitly to make it come first
        // in the structArray, since that is the hard entry reference
        if (null != rootStruct) {
            // NOTE(review): if getId().getValue() boxes to a Long, equals(-1)
            // (an Integer) can never be true and this warning always fires —
            // confirm the boxing type against DWordData.
            if (!rootStruct.getId().getValue().equals(-1)) {
                LOGGER.warn("root struct should have id -1 but has: {}",
                        rootStruct.getId().getValue());
            }
            structArray.write(new DWordData(-1).getValueAsByteArray());
            List<AbstractField> fields = rootStruct.getValueAsList();
            if (fields == null || fields.isEmpty()) {
                // empty root: DataOrDataOffset = 0, FieldCount = 0
                structArray.write(new DWordData(0).getValueAsByteArray());
                structArray.write(new DWordData(0).getValueAsByteArray());
            } else if (fields.size() == 1) {
                // single field: DataOrDataOffset is an INDEX into the field
                // array (the next field slot to be written)
                structArray.write(new DWordData(fieldArray.size()
                        / AbstractField.FIELD_LENGTH).getValueAsByteArray());
                structArray.write(new DWordData(1).getValueAsByteArray());
                fields.get(0).persist(structArray, fieldArray, labelArrayList,
                        fieldIndicesArray, listIndicesArray, fieldData);
            } else {
                // multiple fields: DataOrDataOffset is a BYTE OFFSET into the
                // field indices array
                structArray.write(new DWordData(fieldIndicesArray.size()).
                        getValueAsByteArray());
                structArray.write(new DWordData(fields.size()).getValueAsByteArray());
                // save the current state
                ByteArrayOutputStream pre = new ByteArrayOutputStream();
                pre.write(fieldIndicesArray.toByteArray());
                for (AbstractField f : fields) {
                    // because I have put the pointer into the field index
                    // array into the struct above, I now need to make sure
                    // nobody else writes to the field index array before
                    // we complete.
                    // (If I hadn't done that above, I would have the same
                    // problem with the struct array)
                    // Now, we cannot prevent that, so we need to correct the
                    // field index array later on ... first we write some
                    // dummy entries
                    fieldIndicesArray.write(new DWordData(0).getValueAsByteArray());
                }
                for (AbstractField f : fields) {
                    // real index of the field about to be written; collected
                    // in 'pre' so the dummies can be replaced afterwards
                    DWordData fieldIdx = new DWordData(fieldArray.size()
                            / AbstractField.FIELD_LENGTH);
                    f.persist(structArray, fieldArray, labelArrayList,
                            fieldIndicesArray, listIndicesArray, fieldData);
                    // add to the pre state
                    pre.write(fieldIdx.getValueAsByteArray());
                }
                // fix the field index array: real indices ('pre') replace the
                // dummy entries, followed by whatever nested persists appended
                byte[] post = fieldIndicesArray.toByteArray();
                fieldIndicesArray.reset();
                fieldIndicesArray.write(pre.toByteArray());
                int preLength = pre.toByteArray().length;
                fieldIndicesArray.write(post, preLength, post.length - preLength);
            }
        } else {
            // no root struct at all: emit an empty root entry
            structArray.write(new DWordData(-1).getValueAsByteArray());
            structArray.write(new DWordData(0).getValueAsByteArray());
            structArray.write(new DWordData(0).getValueAsByteArray());
        }

        // write the label array
        for (AbstractField.LabelWrapper elem : labelArrayList) {
            elem.persist(labelArray);
        }

        // file type
        DWordData hf = getHeaderFields().get(GffHeaderField.FILE_TYPE.field());
        if (hf == null) {
            hf = new DWordData(Util.stringToBitField(DEFAULT_FILE_TYPE));
        }
        header[GffHeaderField.FILE_TYPE.ordinal()] = hf;
        LOGGER.debug("ft: {}", signatureToString(hf));

        // file version
        hf = getHeaderFields().get(GffHeaderField.FILE_VERSION.field());
        if (hf == null) {
            hf = new DWordData(Util.stringToBitField(DEFAULT_FILE_VERSION));
        }
        header[GffHeaderField.FILE_VERSION.ordinal()] = hf;
        LOGGER.debug("fv: {}", signatureToString(hf));

        // struct array
        int structOffset = GffHeaderField.values().length * DWordData.LENGTH;
        header[GffHeaderField.STRUCT_OFFSET.ordinal()] = new DWordData(
                structOffset);
        header[GffHeaderField.STRUCT_COUNT.ordinal()] = new DWordData(
                structArray.size() / StructData.STRUCT_LENGTH);
        LOGGER.debug("so: {}", structOffset);
        LOGGER.debug("sc: {}", structArray.size() / StructData.STRUCT_LENGTH);

        // field array
        int fieldOffset = structOffset + structArray.size();
        header[GffHeaderField.FIELD_OFFSET.ordinal()] = new DWordData(fieldOffset);
        header[GffHeaderField.FIELD_COUNT.ordinal()] = new DWordData(fieldArray.
                size() / AbstractField.FIELD_LENGTH);
        LOGGER.debug("fo: {}", fieldOffset);
        LOGGER.debug("fc: {}", fieldArray.size() / AbstractField.FIELD_LENGTH);

        // label array
        int labelOffset = fieldOffset + fieldArray.size();
        header[GffHeaderField.LABEL_OFFSET.ordinal()] = new DWordData(labelOffset);
        header[GffHeaderField.LABEL_COUNT.ordinal()] = new DWordData(labelArray.
                size() / AbstractField.LABEL_LENGTH);
        LOGGER.debug("lo: {}", labelOffset);
        LOGGER.debug("lc: {}", labelArray.size() / AbstractField.LABEL_LENGTH);

        // field data block (count is a byte size, not an entry count)
        int fieldDataOffset = labelOffset + labelArray.size();
        header[GffHeaderField.FIELD_DATA_OFFSET.ordinal()] = new DWordData(
                fieldDataOffset);
        header[GffHeaderField.FIELD_DATA_COUNT.ordinal()] = new DWordData(
                fieldData.size());
        LOGGER.debug("fdo: {}", fieldDataOffset);
        LOGGER.debug("fdc: {}", fieldData.size());

        // field indices array (count is a byte size)
        int fieldIndicesOffset = fieldDataOffset + fieldData.size();
        header[GffHeaderField.FIELD_INDEX_OFFSET.ordinal()] = new DWordData(
                fieldIndicesOffset);
        header[GffHeaderField.FIELD_INDEX_COUNT.ordinal()] = new DWordData(
                fieldIndicesArray.size());
        LOGGER.debug("fio: {}", fieldIndicesOffset);
        LOGGER.debug("fic: {}", fieldIndicesArray.size());

        // list indices array (count is a byte size)
        int listIndicesOffset = fieldIndicesOffset + fieldIndicesArray.size();
        header[GffHeaderField.LIST_INDEX_OFFSET.ordinal()] = new DWordData(
                listIndicesOffset);
        header[GffHeaderField.LIST_INDEX_COUNT.ordinal()] = new DWordData(
                listIndicesArray.size());
        LOGGER.debug("lio: {}", listIndicesOffset);
        LOGGER.debug("lic: {}", listIndicesArray.size());

        // assemble the file: header first, then the sections in the same
        // order the offsets above were computed
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (DWordData d : header) {
            out.write(d.getValueAsByteArray());
        }
        out.write(structArray.toByteArray());
        out.write(fieldArray.toByteArray());
        out.write(labelArray.toByteArray());
        out.write(fieldData.toByteArray());
        out.write(fieldIndicesArray.toByteArray());
        out.write(listIndicesArray.toByteArray());
        return out.toByteArray();
    }
}