Erf.java
// <editor-fold defaultstate="collapsed" desc="license">
/*
* Copyright (c) 2009, Karl H. Beckers
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the <ORGANIZATION> nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
**/
// </editor-fold>
package net.jarre_de_the.griffin.file;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import net.jarre_de_the.griffin.Util;
import static net.jarre_de_the.griffin.file.Erf.ErfResource.RES_REF_LENGTH;
import net.jarre_de_the.griffin.types.data.CExoLocSubStringData;
import net.jarre_de_the.griffin.types.data.DWordData;
import net.jarre_de_the.griffin.types.data.WordData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Reader/writer for BioWare ERF archives (ERF, MOD, SAV, HAK), file format
* version V1.0: a 160-byte header, a localized string list, a key list, a
* resource list of offset/size pairs, and the raw resource data.
*
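* <p>
* A minimal round-trip sketch (the file name is illustrative only):
* <pre>{@code
* Erf erf = new Erf(new File("module.mod"));
* for (Erf.ErfResource res : erf.getResources()) {
*     System.out.println(res);
* }
* byte[] bytes = erf.persist();
* }</pre>
*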
* @author charly4711
*/
public class Erf extends PersistableNwnFile {
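/**
* The ERF signatures this class knows about. The on-disk file type is the
* enum name padded to four characters with a trailing space, e.g.
* {@code "ERF "}.
*/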
public enum ErfType {
ERF, MOD, SAV, HAK
}
public static final String DEFAULT_FILE_TYPE = ErfType.ERF.name() + " ";
public static final String DEFAULT_FILE_VERSION = "V1.0";
private static final Logger LOGGER = LoggerFactory.getLogger(Erf.class);
private List<CExoLocSubStringData> strings = new ArrayList<>();
private List<ErfResource> resources = new ArrayList<>();
public Erf() {
setFileType(DEFAULT_FILE_TYPE);
setFileVersion(DEFAULT_FILE_VERSION);
}
public Erf(File in) throws IOException {
parseFile(in);
}
private void parseFile(File inFile)
throws IOException {
try (RandomAccessFile in = new RandomAccessFile(inFile, "r")) {
// set the header fields
for (ErfHeaderField f : ErfHeaderField.values()) {
DWordData hf = DWordData.read(in);
getHeaderFields().put(f.field(), hf);
}
// go to the localized string table
in.seek(getOffsetStrings());
for (int i = 0; i < getStringCount(); i++) {
strings.add(CExoLocSubStringData.read(in));
}
// key list: one entry per resource
in.seek(getOffsetKeys());
for (int i = 0; i < getEntryCount(); i++) {
ErfResource resource = new ErfResource();
resource.read(in, getOffsetResources());
resources.add(resource);
}
}
}
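/*
* persist() emits the sections in their canonical on-disk order: header,
* localized string list, key list, resource list, resource data. The counts
* and offsets written to the header are recomputed from the in-memory lists
* rather than copied from a previously parsed header.
*/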
@Override
public byte[] persist() throws IOException {
ByteArrayOutputStream localizedStringList = new ByteArrayOutputStream();
ByteArrayOutputStream keyList = new ByteArrayOutputStream();
ByteArrayOutputStream resourceList = new ByteArrayOutputStream();
ByteArrayOutputStream resourceData = new ByteArrayOutputStream();
DWordData[] header = new DWordData[ErfHeaderField.values().length];
// file type
DWordData hf = getHeaderFields().get(ErfHeaderField.FILE_TYPE.field());
if (hf == null) {
hf = new DWordData(Util.stringToBitField(DEFAULT_FILE_TYPE));
}
header[ErfHeaderField.FILE_TYPE.ordinal()] = hf;
LOGGER.debug("ft: ");
for (byte b : hf.getValueAsByteArray()) {
char c = (char) b;
LOGGER.debug("" + c);
}
LOGGER.debug("");
// file version
hf = getHeaderFields().get(ErfHeaderField.FILE_VERSION.field());
if (hf == null) {
hf = new DWordData(Util.stringToBitField(DEFAULT_FILE_VERSION));
}
header[ErfHeaderField.FILE_VERSION.ordinal()] = hf;
LOGGER.debug("fv: ");
for (byte b : hf.getValueAsByteArray()) {
char c = (char) b;
LOGGER.debug("" + c);
}
LOGGER.debug("");
// now for the actual data
// strings
for (CExoLocSubStringData s : getStrings()) {
localizedStringList.write(new DWordData(s.getId()).getValueAsByteArray());
byte[] buf = s.getValueAsByteArray();
localizedStringList.write(new DWordData(buf.length).getValueAsByteArray());
localizedStringList.write(buf);
}
// key list: resource IDs are reassigned sequentially so that each key
// entry lines up with the resource-list entry written in the same order
int keyId = 0;
for (ErfResource resource : getResources()) {
String rref = resource.getResRef();
if (rref.length() > RES_REF_LENGTH) {
rref = rref.substring(0, RES_REF_LENGTH);
}
byte[] buf = new byte[RES_REF_LENGTH];
System.arraycopy(rref.getBytes(Util.CHARSET_US_ASCII), 0,
buf, 0, Math.min(rref.length(), buf.length));
keyList.write(buf);
keyList.write(new DWordData(keyId).getValueAsByteArray());
keyList.write(new WordData((short) resource.getResourceType().id()).getValueAsByteArray());
keyList.write(resource.getUnused().getValueAsByteArray());
keyId++;
}
header[ErfHeaderField.LANGUAGE_COUNT.ordinal()]
= new DWordData(getStrings().size());
LOGGER.debug("string counts: " + getStrings().size() + " / "
+ getHeaderNumber(ErfHeaderField.LANGUAGE_COUNT.field(), null));
header[ErfHeaderField.LOCALIZED_STRING_SIZE.ordinal()]
= new DWordData(localizedStringList.size());
LOGGER.debug("string size : " + localizedStringList.size() + " / "
+ getHeaderNumber(ErfHeaderField.LOCALIZED_STRING_SIZE.field(), null));
header[ErfHeaderField.ENTRY_COUNT.ordinal()]
= new DWordData(getResources().size());
LOGGER.debug("entry count : " + getResources().size() + " / "
+ getHeaderNumber(ErfHeaderField.ENTRY_COUNT.field(), null));
// offsets calculated after filling byte arrays
header[ErfHeaderField.BUILD_YEAR.ordinal()]
= this.getHeaderFields().get(ErfHeaderField.BUILD_YEAR.field()); // no way to change it, for now
header[ErfHeaderField.BUILD_DAY.ordinal()]
= this.getHeaderFields().get(ErfHeaderField.BUILD_DAY.field()); // no way to change it, for now
header[ErfHeaderField.DESCRIPTION_STR_REF.ordinal()]
= getDescription();
for (int i = 1; i <= 29; i++) {
header[ErfHeaderField.valueOf("RESERVED" + i).ordinal()]
= getHeaderFields().get(ErfHeaderField.valueOf("RESERVED" + i).field());
}
long headerLength = ErfHeaderField.values().length * DWordData.LENGTH;
header[ErfHeaderField.OFFSET_TO_LOCALIZED_STRING.ordinal()]
= new DWordData(headerLength);
LOGGER.debug("off strings : " + headerLength + " / " + getOffsetStrings());
header[ErfHeaderField.OFFSET_TO_KEY_LIST.ordinal()]
= new DWordData(headerLength + localizedStringList.size());
LOGGER.debug("off keys : " + (headerLength
+ localizedStringList.size()) + " / " + getOffsetKeys());
header[ErfHeaderField.OFFSET_TO_RESOURCE_LIST.ordinal()]
= new DWordData(headerLength + localizedStringList.size()
+ keyList.size());
LOGGER.debug("off res list : " + (headerLength + localizedStringList.size()
+ keyList.size()) + " / " + getOffsetResources());
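// each resource-list entry is two DWORDs (offset and size), so the data
// section begins entryCount * 8 bytes after the start of the resource list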
// use the live resource count (it matches the ENTRY_COUNT written above)
// rather than a possibly stale header value from an earlier parse
long resourceOffset = headerLength + localizedStringList.size()
+ keyList.size() + (getResources().size() * 2L * DWordData.LENGTH);
// for resources and resource data, we need to know the offsets
// from the other lists, first
for (ErfResource resource : getResources()) {
resourceList.write(new DWordData(resourceOffset
+ resourceData.size()).getValueAsByteArray());
resourceList.write(new DWordData(
resource.getResourceLength()).getValueAsByteArray());
Object data = resource.getData();
byte[] resourceBytes;
Class<?> clazz = resource.getResourceType().contentType().typeClass();
if (null != clazz) {
if (data instanceof PersistableNwnFile
&& (Gff.class.isAssignableFrom(clazz) || TwoDa.class.isAssignableFrom(clazz))) {
PersistableNwnFile file = (PersistableNwnFile) data;
resourceBytes = file.persist();
} else if (String.class.isAssignableFrom(clazz)) {
resourceBytes = ((String) data).getBytes(Util.CHARSET_US_ASCII);
} else {
resourceBytes = (byte[]) data;
}
} else {
// treat as binary
resourceBytes = (byte[]) data;
}
if (null != resourceBytes) {
resourceData.write(resourceBytes);
}
}
// put it all together
ByteArrayOutputStream out = new ByteArrayOutputStream();
for (DWordData d : header) {
// header fields never set on a freshly constructed Erf (build date,
// description, reserved slots) would NPE here, so write zeros instead
out.write(d != null ? d.getValueAsByteArray()
: new DWordData(0).getValueAsByteArray());
}
out.write(localizedStringList.toByteArray());
out.write(keyList.toByteArray());
out.write(resourceList.toByteArray());
out.write(resourceData.toByteArray());
return out.toByteArray();
}
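/*
* Declaration order is significant here: parseFile() and persist() rely on
* ordinal() matching the on-disk field order (40 DWORDs, 160 bytes in all,
* including the 29 reserved slots).
*/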
protected enum ErfHeaderField {
FILE_TYPE(CommonHeaderField.FILE_TYPE.field()),
FILE_VERSION(CommonHeaderField.FILE_VERSION.field()),
LANGUAGE_COUNT("languageCount"),
LOCALIZED_STRING_SIZE("localizedStringSize"),
ENTRY_COUNT("entryCount"),
OFFSET_TO_LOCALIZED_STRING("offsetToLocalizedString"),
OFFSET_TO_KEY_LIST("offsetToKeyList"),
OFFSET_TO_RESOURCE_LIST("offsetToResourceList"),
BUILD_YEAR("buildYear"),
BUILD_DAY("buildDay"),
DESCRIPTION_STR_REF("descriptionStrRef"),
RESERVED1("RESERVED1"), // lazy way of reserving 116 bytes
RESERVED2("RESERVED2"),
RESERVED3("RESERVED3"),
RESERVED4("RESERVED4"),
RESERVED5("RESERVED5"),
RESERVED6("RESERVED6"),
RESERVED7("RESERVED7"),
RESERVED8("RESERVED8"),
RESERVED9("RESERVED9"),
RESERVED10("RESERVED10"),
RESERVED11("RESERVED11"),
RESERVED12("RESERVED12"),
RESERVED13("RESERVED13"),
RESERVED14("RESERVED14"),
RESERVED15("RESERVED15"),
RESERVED16("RESERVED16"),
RESERVED17("RESERVED17"),
RESERVED18("RESERVED18"),
RESERVED19("RESERVED19"),
RESERVED20("RESERVED20"),
RESERVED21("RESERVED21"),
RESERVED22("RESERVED22"),
RESERVED23("RESERVED23"),
RESERVED24("RESERVED24"),
RESERVED25("RESERVED25"),
RESERVED26("RESERVED26"),
RESERVED27("RESERVED27"),
RESERVED28("RESERVED28"),
RESERVED29("RESERVED29");
private final String field;
private ErfHeaderField(String field) {
this.field = field;
}
public String field() {
return field;
}
}
public List<CExoLocSubStringData> getStrings() {
return strings;
}
public void setStrings(List<CExoLocSubStringData> strings) {
this.strings = strings;
}
public List<ErfResource> getResources() {
return resources;
}
public void setResources(List<ErfResource> resources) {
this.resources = resources;
}
public DWordData getDescription() {
return getHeaderFields().get(ErfHeaderField.DESCRIPTION_STR_REF.field());
}
public void setDescription(DWordData strref) {
getHeaderFields().put(ErfHeaderField.DESCRIPTION_STR_REF.field(), strref);
}
// These getters are private because the values are derived from the header
// fields (or the backing lists); the private setters below exist only so
// clone() can copy the header values across.
private long getStringCount() {
return getHeaderNumber(ErfHeaderField.LANGUAGE_COUNT.field(), getStrings());
}
private void setStringCount(long count) {
getHeaderFields().put(ErfHeaderField.LANGUAGE_COUNT.field(),
new DWordData(count));
}
private long getEntryCount() {
return getHeaderNumber(ErfHeaderField.ENTRY_COUNT.field(), getResources());
}
private void setEntryCount(long count) {
getHeaderFields().put(ErfHeaderField.ENTRY_COUNT.field(),
new DWordData(count));
}
private long getOffsetStrings() {
return getHeaderNumber(ErfHeaderField.OFFSET_TO_LOCALIZED_STRING.field(), null);
}
private void setOffsetStrings(long offset) {
getHeaderFields().put(ErfHeaderField.OFFSET_TO_LOCALIZED_STRING.field(),
new DWordData(offset));
}
private long getOffsetKeys() {
return getHeaderNumber(ErfHeaderField.OFFSET_TO_KEY_LIST.field(), null);
}
private void setOffsetKeys(long offset) {
getHeaderFields().put(ErfHeaderField.OFFSET_TO_KEY_LIST.field(),
new DWordData(offset));
}
private long getOffsetResources() {
return getHeaderNumber(ErfHeaderField.OFFSET_TO_RESOURCE_LIST.field(), null);
}
private void setOffsetResources(long offset) {
getHeaderFields().put(ErfHeaderField.OFFSET_TO_RESOURCE_LIST.field(),
new DWordData(offset));
}
@Override
public Erf clone() throws CloneNotSupportedException {
Erf clone = (Erf) super.clone();
clone.setResources(new ArrayList<ErfResource>());
clone.setStrings(new ArrayList<CExoLocSubStringData>());
clone.setFileType(getFileType());
clone.setFileVersion(getFileVersion());
clone.setDescription(getDescription());
clone.setEntryCount(getEntryCount());
clone.setOffsetKeys(getOffsetKeys());
clone.setOffsetResources(getOffsetResources());
clone.setOffsetStrings(getOffsetStrings());
clone.setStringCount(getStringCount());
for (Erf.ErfResource r : getResources()) {
clone.getResources().add(r.clone());
}
for (CExoLocSubStringData d : getStrings()) {
clone.getStrings().add(d.clone());
}
return clone;
}
@Override
public boolean equals(Object compare) {
if (compare == this) {
return true;
}
if (!super.equals(compare)) {
return false;
}
if (!(compare instanceof Erf)) {
return false;
}
Erf file = (Erf) compare;
return areNullablePropertiesEqual(this.getDescription(), file.getDescription())
&& areNullablePropertiesEqual(this.getResources(), file.getResources())
&& areNullablePropertiesEqual(this.getStrings(), file.getStrings())
&& this.getEntryCount() == file.getEntryCount()
// && this.getOffsetKeys() == file.getOffsetKeys()
// && this.getOffsetResources() == file.getOffsetResources()
// && this.getOffsetStrings() == file.getOffsetStrings()
&& this.getStringCount() == file.getStringCount();
}
@Override
public int hashCode() {
int hash = super.hashCode();
hash = 89 * hash + Objects.hashCode(this.strings);
hash = 89 * hash + Objects.hashCode(this.resources);
return hash;
}
/**
* Making the ERF file use the same KeyEntry as a KEY file would have been
* too easy: ERF archives define their own 24-byte key-entry layout, so it
* gets a class of its own.
*/
public static class ErfResource extends EmbeddedResource implements Cloneable {
private String resRef = "";
private long resId = 0;
private WordData unused = new WordData((short) 0);
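// On-disk key-entry layout: 16-byte ResRef, DWORD ResID, WORD ResType,
// WORD unused.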
public static final int RES_REF_LENGTH = 16;
public static final int LENGTH = DWordData.LENGTH + (2 * WordData.LENGTH) + RES_REF_LENGTH;
public ErfResource() {
}
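/*
* Reads one key-list entry, then follows the matching resource-list entry
* (located at offset + resId * 8) to pick up the data offset and length
* before loading the resource bytes themselves.
*/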
@Override
public void read(RandomAccessFile in, long offset) throws IOException {
byte[] buf = new byte[RES_REF_LENGTH];
// the ResRef is a null-padded, 16-byte ASCII name
in.readFully(buf);
resRef = Util.getNullTerminatedString(buf);
resId = DWordData.read(in).getValueAsNumber();
setResourceType(ResourceType.getResourceTypeById(WordData.read(in).getValueAsNumber()));
unused = WordData.read(in);
long sPos = in.getFilePointer();
in.seek(offset + (resId * 2 * DWordData.LENGTH));
setOffset(DWordData.read(in).getValueAsNumber());
setResourceLength(DWordData.read(in).getValueAsNumber());
// readResource seeks to the resource data; jump back afterwards so the
// caller's loop over key entries continues from the right position
setData(readResource(in));
in.seek(sPos);
}
public String getResRef() {
return resRef;
}
public void setResRef(String resRef) {
this.resRef = resRef;
}
public long getResId() {
return resId;
}
public void setResId(long resId) {
this.resId = resId;
}
public WordData getUnused() {
return unused;
}
/**
* Overrides the {@code java.lang.Object} method to ensure we always get
* back an instance of {@code ErfResource} rather than just
* {@code Object}.
* <p>
* @return A deep copy of this object.
* @throws java.lang.CloneNotSupportedException if a superclass refuses the clone
*/
@Override
public ErfResource clone() throws CloneNotSupportedException {
LOGGER.debug("Cloning object");
ErfResource clone = (ErfResource) super.clone();
clone.setResRef(resRef);
clone.setResId(resId);
clone.unused = unused; // never mutated after read(), so sharing the instance is fine
return clone;
}
@Override
public boolean equals(Object compare) {
if (compare == this) {
return true;
}
if (!super.equals(compare)) {
return false;
}
if (!(compare instanceof ErfResource)) {
return false;
}
ErfResource res = (ErfResource) compare;
return res.getResId() == this.getResId()
&& res.getResRef().equals(this.getResRef());
// && res.getOffset() == this.getOffset()
// && res.getResourceLength() == this.getResourceLength();
}
@Override
public int hashCode() {
int hash = super.hashCode();
hash = 83 * hash + Objects.hashCode(this.resRef);
hash = 83 * hash + (int) (this.resId ^ (this.resId >>> 32));
return hash;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + ": " + this.resRef
+ "." + this.getResourceType().extension() + " - " + getResId();
}
}
}