/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.hashdatabase;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.sleuthkit.autopsy.coreutils.AutopsyPropFile;
import org.sleuthkit.autopsy.hashdatabase.HashDb.DBType;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * Loads, stores, and saves the hash database set definitions used by the hash
 * database module, persisting them as XML in the user's hashsets.xml file.
 *
 * @author dfickling
 */
public class HashDbXML {
    private static final String ROOT_EL = "hash_sets";
    private static final String SET_EL = "hash_set";
    private static final String SET_NAME_ATTR = "name";
    private static final String SET_TYPE_ATTR = "type";
    private static final String SET_USE_FOR_INGEST_ATTR = "use_for_ingest";
    private static final String PATH_EL = "hash_set_path";
    private static final String PATH_NUMBER_ATTR = "number";
    private static final String CUR_HASHSETS_FILE_NAME = "hashsets.xml";
    private static final String ENCODING = "UTF-8";
    private static final String CUR_HASHSET_FILE = AutopsyPropFile.getUserDirPath() + File.separator + CUR_HASHSETS_FILE_NAME;
    private static final Logger logger = Logger.getLogger(HashDbXML.class.getName());
    private static HashDbXML currentInstance;

    private Map<String, HashDb> theSets;
    private String xmlFile;

    private HashDbXML(String xmlFile) {
        theSets = new LinkedHashMap<String, HashDb>();
        this.xmlFile = xmlFile;
    }

    /**
     * Get the singleton instance that manages the application's current list of hash sets.
     */
    static HashDbXML getCurrent() {
        if (currentInstance == null) {
            currentInstance = new HashDbXML(CUR_HASHSET_FILE);
            currentInstance.reload();
        }
        return currentInstance;
    }
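
    /*
     * Minimal usage sketch (assumed calling code, not part of this class; the
     * DBType.NSRL constant and Arrays.asList call are illustrative assumptions,
     * while the HashDb constructor arguments mirror how load() builds sets below):
     *
     *   HashDbXML xml = HashDbXML.getCurrent();   // loads hashsets.xml, creating it if missing
     *   xml.addSet(new HashDb("NSRL", DBType.NSRL,
     *           Arrays.asList("C:\\hashsets\\NSRLFile.txt-md5.idx"), true));
     *   List<HashDb> nsrlSets = xml.getSets(DBType.NSRL);
     */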

    /**
     * Get the hash sets
     */
    public List<HashDb> getSets() {
        List<HashDb> ret = new ArrayList<HashDb>();
        ret.addAll(theSets.values());
        return ret;
    }

    /**
     * Get the hash sets of the given type
     */
    public List<HashDb> getSets(DBType type) {
        List<HashDb> ret = new ArrayList<HashDb>();
        for (HashDb db : theSets.values()) {
            if (db.getType().equals(type)) {
                ret.add(db);
            }
        }
        return ret;
    }

    /**
     * Add a hash set, overwriting any existing set with the same name
     */
    public void addSet(HashDb set) {
        theSets.put(set.getName(), set);
        save();
    }

    /**
     * Remove a hash set
     */
    public void removeSet(HashDb set) {
        theSets.remove(set.getName());
        save();
    }

    /**
     * Put all the given hash sets into this XML configuration, overwriting sets with the same names
     */
    public void putAll(List<HashDb> sets) {
        for (HashDb set : sets) {
            theSets.put(set.getName(), set);
        }
        save();
    }

    /**
     * Load the hash sets file, or create a new one if it does not exist or cannot be loaded
     */
    public void reload() {
        boolean created = false;
        theSets.clear();
        if (!this.setsFileExists()) {
            //create new if it doesn't exist
            save();
            created = true;
        }
        //load, if fails to load create new
        if (!load() && !created) {
            //create new if failed to load
            save();
        }
    }

    /**
     * Writes out the current hash sets file, replacing the previous one
     */
    private boolean save() {
        boolean success = false;
        DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance();
        try {
            DocumentBuilder docBuilder = dbfac.newDocumentBuilder();
            Document doc = docBuilder.newDocument();

            Element rootEl = doc.createElement(ROOT_EL);
            doc.appendChild(rootEl);

            for (String setName : theSets.keySet()) {
                HashDb set = theSets.get(setName);
                String useForIngest = Boolean.toString(set.getUseForIngest());
                List<String> paths = set.getDatabasePaths();
                String type = set.getType().toString();

                Element setEl = doc.createElement(SET_EL);
                setEl.setAttribute(SET_NAME_ATTR, setName);
                setEl.setAttribute(SET_TYPE_ATTR, type);
                setEl.setAttribute(SET_USE_FOR_INGEST_ATTR, useForIngest);

                for (int i = 0; i < paths.size(); i++) {
                    String path = paths.get(i);
                    Element pathEl = doc.createElement(PATH_EL);
                    pathEl.setAttribute(PATH_NUMBER_ATTR, Integer.toString(i));
                    pathEl.setTextContent(path);
                    setEl.appendChild(pathEl);
                }
                rootEl.appendChild(setEl);
            }
            success = saveDoc(doc);
        } catch (ParserConfigurationException e) {
            logger.log(Level.SEVERE, "Error saving hash sets: can't initialize parser.", e);
        }
        return success;
    }
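
    /*
     * For reference, save() and load() write and read XML of roughly this shape,
     * built from the element and attribute constants above (illustrative values only):
     *
     *   <hash_sets>
     *     <hash_set name="NSRL" type="NSRL" use_for_ingest="true">
     *       <hash_set_path number="0">C:\hashsets\NSRLFile.txt-md5.idx</hash_set_path>
     *     </hash_set>
     *   </hash_sets>
     */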

    /**
     * Load and parse the XML file, then dispose of it
     */
    public boolean load() {
        final Document doc = loadDoc();
        if (doc == null) {
            return false;
        }

        Element root = doc.getDocumentElement();
        if (root == null) {
            logger.log(Level.SEVERE, "Error loading hash sets: invalid file format.");
            return false;
        }
        NodeList setsNList = root.getElementsByTagName(SET_EL);
        int numSets = setsNList.getLength();
        for (int i = 0; i < numSets; ++i) {
            Element setEl = (Element) setsNList.item(i);
            final String name = setEl.getAttribute(SET_NAME_ATTR);
            final String type = setEl.getAttribute(SET_TYPE_ATTR);
            final String useForIngest = setEl.getAttribute(SET_USE_FOR_INGEST_ATTR);
            boolean useForIngestBool = Boolean.parseBoolean(useForIngest);
            DBType typeDBType = DBType.valueOf(type);
            List<String> paths = new ArrayList<String>();

            //parse all paths; they are kept in document order, and the "number"
            //attribute is read but not used to reorder them
            NodeList pathsNList = setEl.getElementsByTagName(PATH_EL);
            final int numPaths = pathsNList.getLength();
            for (int j = 0; j < numPaths; ++j) {
                Element pathEl = (Element) pathsNList.item(j);
                String number = pathEl.getAttribute(PATH_NUMBER_ATTR);
                String path = pathEl.getTextContent();
                paths.add(path);
            }
            HashDb set = new HashDb(name, typeDBType, paths, useForIngestBool);
            theSets.put(name, set);
        }
        return true;
    }

    private boolean setsFileExists() {
        File f = new File(xmlFile);
        return f.exists() && f.canRead() && f.canWrite();
    }

    private Document loadDoc() {
        DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
        Document ret = null;

        try {
            DocumentBuilder builder = builderFactory.newDocumentBuilder();
            ret = builder.parse(new FileInputStream(xmlFile));
        } catch (ParserConfigurationException e) {
            logger.log(Level.SEVERE, "Error loading hash sets: can't initialize parser.", e);
        } catch (SAXException e) {
            logger.log(Level.SEVERE, "Error loading hash sets: can't parse XML.", e);
        } catch (IOException e) {
            //error reading file
            logger.log(Level.SEVERE, "Error loading hash sets: can't read file.", e);
        }
        return ret;
    }

    private boolean saveDoc(final Document doc) {
        TransformerFactory xf = TransformerFactory.newInstance();
        xf.setAttribute("indent-number", new Integer(1));
        boolean success = false;
        try {
            Transformer xformer = xf.newTransformer();
            xformer.setOutputProperty(OutputKeys.METHOD, "xml");
            xformer.setOutputProperty(OutputKeys.INDENT, "yes");
            xformer.setOutputProperty(OutputKeys.ENCODING, ENCODING);
            xformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
            xformer.setOutputProperty(OutputKeys.VERSION, "1.0");

            File file = new File(xmlFile);
            FileOutputStream stream = new FileOutputStream(file);
            Result out = new StreamResult(new OutputStreamWriter(stream, ENCODING));
            xformer.transform(new DOMSource(doc), out);
            stream.flush();
            stream.close();
            success = true;
        } catch (UnsupportedEncodingException e) {
            logger.log(Level.SEVERE, "Should not happen", e);
        } catch (TransformerConfigurationException e) {
            logger.log(Level.SEVERE, "Error writing hash sets XML", e);
        } catch (TransformerException e) {
            logger.log(Level.SEVERE, "Error writing hash sets XML", e);
        } catch (FileNotFoundException e) {
            logger.log(Level.SEVERE, "Error writing hash sets XML: cannot write to file: " + xmlFile, e);
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Error writing hash sets XML: cannot write to file: " + xmlFile, e);
        }
        return success;
    }
}