diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
index 67bd3ce7b3..b820b4fac9 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
@@ -57,6 +57,7 @@ public class Case {
private static final String autopsyVer = Version.getVersion(); // current version of autopsy. Change it when the version is changed
private static final String appName = Version.getName() + " " + autopsyVer;
+ private static final String XSDFILE = "CaseSchema.xsd";
/**
* Property name that indicates the name of the current case has changed.
* Fired with the case is renamed, and when the current case is
@@ -195,7 +196,7 @@ public class Case {
XMLCaseManagement xmlcm = new XMLCaseManagement();
xmlcm.create(caseDir, caseName, examiner, caseNumber); // create a new XML config file
xmlcm.writeFile();
-
+
String dbPath = caseDir + File.separator + "autopsy.db";
SleuthkitCase db = SleuthkitCase.newCase(dbPath);
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CaseSchema.xsd b/Core/src/org/sleuthkit/autopsy/casemodule/CaseSchema.xsd
new file mode 100644
index 0000000000..2aeddc4733
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/CaseSchema.xsd
@@ -0,0 +1,97 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/XMLCaseManagement.java b/Core/src/org/sleuthkit/autopsy/casemodule/XMLCaseManagement.java
index 0e16cab60d..d560e51136 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/XMLCaseManagement.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/XMLCaseManagement.java
@@ -23,16 +23,15 @@ import java.io.*;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
-import java.util.HashSet;
-import java.util.TimeZone;
import java.util.logging.Level;
-import org.sleuthkit.autopsy.coreutils.Logger;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.xml.parsers.*;
import javax.xml.transform.*;
import javax.xml.transform.dom.*;
import javax.xml.transform.stream.*;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.w3c.dom.*;
/**
@@ -46,6 +45,7 @@ import org.w3c.dom.*;
* @author jantonius
*/
public class XMLCaseManagement implements CaseConfigFileInterface{
+ final static String XSDFILE = "CaseSchema.xsd";
final static String TOP_ROOT_NAME = "AutopsyCase";
final static String CASE_ROOT_NAME = "Case";
@@ -617,7 +617,12 @@ public class XMLCaseManagement implements CaseConfigFileInterface{
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
doc = db.parse(file);
+ doc.getDocumentElement().normalize();
doc.getDocumentElement().normalize();
+
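+ // Validate the parsed case file against the bundled schema; a failure is logged as a warning and the open continues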
+ if(!XMLUtil.xmlIsValid(doc, XMLCaseManagement.class, XSDFILE)){
+ Logger.getLogger(XMLCaseManagement.class.getName()).log(Level.WARNING, "Could not validate against [" + XSDFILE + "], results may not be accurate");
+ }
Element rootEl = doc.getDocumentElement();
String rootName = rootEl.getNodeName();
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java
index 0cb925b2d6..3ef562d202 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java
@@ -21,16 +21,15 @@ package org.sleuthkit.autopsy.coreutils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
-import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
-import java.util.logging.Level;
import org.openide.modules.InstalledFileLocator;
import org.openide.modules.Places;
+
/**
*
* Platform utilities
@@ -42,6 +41,10 @@ public class PlatformUtil {
public static final String OS_VERSION_UNKNOWN = "unknown";
public static final String OS_ARCH_UNKNOWN = "unknown";
+
+
+
+
/**
* Get root path where the application is installed
*
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/XMLUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/XMLUtil.java
new file mode 100644
index 0000000000..d9073c90d3
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/XMLUtil.java
@@ -0,0 +1,183 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2012 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.coreutils;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.UnsupportedEncodingException;
+import java.util.logging.Level;
+import javax.xml.XMLConstants;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Result;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerConfigurationException;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMResult;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+import javax.xml.validation.Schema;
+import javax.xml.validation.SchemaFactory;
+import javax.xml.validation.Validator;
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+/**
+ * XML utilities.
+ *
+ * This class provides basic utilities for working with XML files, such as:
+ * - validating XML files against a given schema
+ * - saving documents to disk
+ * - loading documents from disk
+ */
+
+public class XMLUtil {
+ /** Validates an XML file against a pre-defined schema file.
+ *
+ * The schema file is extracted automatically when this method is called; the XML being validated is not.
+ * Be sure the XML file is already extracted, otherwise validation will fail.
+ * @param xmlfile The XML file to validate, as a DOMSource
+ * @param clazz The class whose package contains the schema resource (also used for logging)
+ * @param schemaFile The file name of the schema to validate against; it must exist as a resource in the same package as clazz
+ * @return true if the XML validates against the schema, false otherwise
+ *
+ * For example usages, please see KeywordSearchListsXML, HashDbXML, or IngestModuleLoader.
+ */
+ public static boolean xmlIsValid(DOMSource xmlfile, Class clazz, String schemaFile) {
+ try{
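+ // Copy the schema resource from the clazz package into the user config directory so the validator can read it from disk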
+ PlatformUtil.extractResourceToUserConfigDir(clazz, schemaFile);
+ File schemaLoc = new File(PlatformUtil.getUserConfigDirectory() + File.separator + schemaFile);
+ SchemaFactory schm = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
+ try{
+ Schema schema = schm.newSchema(schemaLoc);
+ Validator validator = schema.newValidator();
+ DOMResult result = new DOMResult();
+ validator.validate(xmlfile, result);
+ return true;
+ }
+ catch(SAXException e){
+ Logger.getLogger(clazz.getName()).log(Level.WARNING, "Unable to validate XML file.", e);
+ return false;
+ }
+ }
+ catch(IOException e){
+ Logger.getLogger(clazz.getName()).log(Level.WARNING, "Unable to load XML file [" + xmlfile.toString() + "] of type ["+schemaFile+"]", e);
+ return false;
+ }
+ }
+
+ /** Validates an XML Document against an XSD.
+ *
+ * The schema file is extracted automatically when this method is called; the XML being validated is not.
+ * Be sure the XML file is already extracted, otherwise validation will fail.
+ * @param doc The XML Document to validate
+ * @param clazz The class whose package contains the schema resource
+ * @param type The file name of the schema to validate against; it must exist as a resource in the same package as clazz
+ * @return true if the document validates against the schema, false otherwise
+ *
+ * For example usages, please see KeywordSearchListsXML, HashDbXML, or IngestModuleLoader.
+ */
+ public static boolean xmlIsValid(Document doc, Class clazz, String type){
+ DOMSource dms = new DOMSource(doc);
+ return xmlIsValid(dms, clazz, type);
+ }
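+ /* A minimal usage sketch (the class name, path, and schema file below are illustrative, not part of this patch):
+ *
+ * Document settings = XMLUtil.loadDoc(MySettings.class, settingsPath, "MySettingsSchema.xsd");
+ * if (settings != null) {
+ * // ... read or modify elements ...
+ * XMLUtil.saveDoc(MySettings.class, settingsPath, "UTF-8", settings);
+ * }
+ */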
+
+
+
+ /** Loads an XML file from disk and validates it.
+ *
+ * @param clazz the class this method is invoked from
+ * @param xmlPath the full path to the XML file to load
+ * @param xsdPath the file name of the schema to validate against; it must exist as a resource in the same package as clazz
+ * @return the parsed Document, or null if the file could not be parsed
+ */
+ public static Document loadDoc(Class clazz, String xmlPath, String xsdPath) {
+ DocumentBuilderFactory builderFactory =
+ DocumentBuilderFactory.newInstance();
+ Document ret = null;
+
+ try {
+ DocumentBuilder builder = builderFactory.newDocumentBuilder();
+ ret = builder.parse(
+ new FileInputStream(xmlPath));
+ } catch (ParserConfigurationException e) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error loading XML file: can't initialize parser.", e);
+
+ } catch (SAXException e) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error loading XML file: can't parse XML.", e);
+
+ } catch (IOException e) {
+ //error reading file
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error loading XML file: can't read file.", e);
+
+ }
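+ // A document that fails schema validation is still returned; the mismatch is only logged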
+ if (ret != null && !XMLUtil.xmlIsValid(ret, clazz, xsdPath)) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error loading XML file: could not validate against [" + xsdPath + "], results may not be accurate");
+ }
+
+ return ret;
+ }
+
+ /** Saves an XML Document to disk.
+ *
+ * @param clazz the class this method is invoked from
+ * @param xmlPath the full path to save the XML to
+ * @param encoding the encoding, such as "UTF-8", to encode the file with
+ * @param doc the document to save
+ * @return true if the document was written successfully, false otherwise
+ */
+ public static boolean saveDoc(Class clazz, String xmlPath, String encoding, final Document doc) {
+ TransformerFactory xf = TransformerFactory.newInstance();
+ xf.setAttribute("indent-number", new Integer(1));
+ boolean success = false;
+ try {
+ Transformer xformer = xf.newTransformer();
+ xformer.setOutputProperty(OutputKeys.METHOD, "xml");
+ xformer.setOutputProperty(OutputKeys.INDENT, "yes");
+ xformer.setOutputProperty(OutputKeys.ENCODING, encoding);
+ xformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
+ xformer.setOutputProperty(OutputKeys.VERSION, "1.0");
+ File file = new File(xmlPath);
+ FileOutputStream stream = new FileOutputStream(file);
+ Result out = new StreamResult(new OutputStreamWriter(stream, encoding));
+ xformer.transform(new DOMSource(doc), out);
+ stream.flush();
+ stream.close();
+ success = true;
+
+ } catch (UnsupportedEncodingException e) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Should not happen", e);
+ } catch (TransformerConfigurationException e) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error writing XML file", e);
+ } catch (TransformerException e) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error writing XML file", e);
+ } catch (FileNotFoundException e) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error writing XML file: cannot write to file: " + xmlPath, e);
+ } catch (IOException e) {
+ Logger.getLogger(clazz.getName()).log(Level.SEVERE, "Error writing XML file: cannot write to file: " + xmlPath, e);
+ }
+ return success;
+ }
+
+}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
index 675f007c97..dadeb72d65 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
@@ -21,12 +21,8 @@ package org.sleuthkit.autopsy.ingest;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
-import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
@@ -51,15 +47,6 @@ import java.util.logging.Level;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Result;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerConfigurationException;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-import org.openide.filesystems.FileSystem;
import org.openide.modules.ModuleInfo;
import org.openide.util.Exceptions;
import org.openide.util.Lookup;
@@ -71,14 +58,13 @@ import org.reflections.scanners.SubTypesScanner;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.util.FilterBuilder;
import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
+import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
-import org.xml.sax.SAXException;
-import org.openide.filesystems.Repository;
-import org.sleuthkit.autopsy.coreutils.ModuleSettings;
/**
* Class responsible for discovery and loading ingest modules specified in
@@ -107,10 +93,11 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings;
public final class IngestModuleLoader {
private static final String PIPELINE_CONFIG_XML = "pipeline_config.xml";
+ private static final String XSDFILE = "PipelineConfigSchema.xsd";
private String absFilePath;
private static IngestModuleLoader instance;
//raw XML pipeline representation for validation
- private final List<XmlPipelineRaw> pipelinesXML;
+ private final List<IngestModuleLoader.XmlPipelineRaw> pipelinesXML;
//validated pipelines with instantiated modules
private final List filePipeline;
private final List imagePipeline;
@@ -132,7 +119,7 @@ public final class IngestModuleLoader {
};
private IngestModuleLoader() {
- pipelinesXML = new ArrayList<XmlPipelineRaw>();
+ pipelinesXML = new ArrayList<IngestModuleLoader.XmlPipelineRaw>();
filePipeline = new ArrayList();
imagePipeline = new ArrayList();
dateFormatter = new SimpleDateFormat(DATE_FORMAT);
@@ -193,16 +180,16 @@ public final class IngestModuleLoader {
* @throws IngestModuleLoaderException
*/
private void validate() throws IngestModuleLoaderException {
- for (XmlPipelineRaw pRaw : pipelinesXML) {
+ for (IngestModuleLoader.XmlPipelineRaw pRaw : pipelinesXML) {
boolean pipelineErrors = false;
//check pipelineType
String pipelineType = pRaw.type;
- XmlPipelineRaw.PIPELINE_TYPE pType = null;
+ IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pType = null;
try {
- pType = XmlPipelineRaw.getPipelineType(pipelineType);
+ pType = IngestModuleLoader.XmlPipelineRaw.getPipelineType(pipelineType);
} catch (IllegalArgumentException e) {
pipelineErrors = true;
logger.log(Level.SEVERE, "Unknown pipeline type: " + pipelineType);
@@ -211,7 +198,7 @@ public final class IngestModuleLoader {
//ordering store
Map orderings = new HashMap();
- for (XmlModuleRaw pMod : pRaw.modules) {
+ for (IngestModuleLoader.XmlModuleRaw pMod : pRaw.modules) {
boolean moduleErrors = false;
//record ordering for validation
@@ -224,7 +211,7 @@ public final class IngestModuleLoader {
//check pipelineType
String modType = pMod.type;
- if (!modType.equals(XmlModuleRaw.MODULE_TYPE.PLUGIN.toString())) {
+ if (!modType.equals(IngestModuleLoader.XmlModuleRaw.MODULE_TYPE.PLUGIN.toString())) {
moduleErrors = true;
logger.log(Level.SEVERE, "Unknown module type: " + modType);
}
@@ -267,7 +254,7 @@ public final class IngestModuleLoader {
}
//if file module: check if has public static getDefault()
- if (pType == XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS) {
+ if (pType == IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS) {
try {
Method getDefaultMethod = moduleClass.getMethod("getDefault");
int modifiers = getDefaultMethod.getModifiers();
@@ -285,7 +272,7 @@ public final class IngestModuleLoader {
Exceptions.printStackTrace(ex);
}
} //if image module: check if has public constructor with no args
- else if (pType == XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS) {
+ else if (pType == IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS) {
try {
Constructor<?> constr = moduleClass.getConstructor();
int modifiers = constr.getModifiers();
@@ -519,12 +506,12 @@ public final class IngestModuleLoader {
boolean exists = false;
Class foundClass = (Class) it.next();
- for (XmlPipelineRaw rawP : pipelinesXML) {
- if (!rawP.type.equals(XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS.toString())) {
+ for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) {
+ if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS.toString())) {
continue; //skip
}
- for (XmlModuleRaw rawM : rawP.modules) {
+ for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) {
//logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName());
if (foundClass.getName().equals(rawM.location)) {
exists = true;
@@ -539,7 +526,7 @@ public final class IngestModuleLoader {
if (exists == false) {
logger.log(Level.INFO, "Discovered a new file module to load: " + foundClass.getName());
//ADD MODULE
- addModuleToRawPipeline(foundClass, XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS);
+ addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS);
modulesChanged = true;
}
@@ -550,13 +537,13 @@ public final class IngestModuleLoader {
boolean exists = false;
Class foundClass = (Class) it.next();
- for (XmlPipelineRaw rawP : pipelinesXML) {
- if (!rawP.type.equals(XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS.toString())) {
+ for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) {
+ if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS.toString())) {
continue; //skip
}
- for (XmlModuleRaw rawM : rawP.modules) {
+ for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) {
//logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName());
if (foundClass.getName().equals(rawM.location)) {
exists = true;
@@ -571,7 +558,7 @@ public final class IngestModuleLoader {
if (exists == false) {
logger.log(Level.INFO, "Discovered a new image module to load: " + foundClass.getName());
//ADD MODULE
- addModuleToRawPipeline(foundClass, XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS);
+ addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS);
modulesChanged = true;
}
@@ -579,7 +566,7 @@ public final class IngestModuleLoader {
if (modulesChanged) {
save();
- pcs.firePropertyChange(Event.ModulesReloaded.toString(), 0, 1);
+ pcs.firePropertyChange(IngestModuleLoader.Event.ModulesReloaded.toString(), 0, 1);
}
/*
@@ -604,7 +591,7 @@ public final class IngestModuleLoader {
* class path
* @param newOrder new order to set
*/
- void setModuleOrder(XmlPipelineRaw.PIPELINE_TYPE pipeLineType, String moduleLocation, int newOrder) throws IngestModuleLoaderException {
+ void setModuleOrder(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pipeLineType, String moduleLocation, int newOrder) throws IngestModuleLoaderException {
throw new IngestModuleLoaderException("Not yet implemented");
}
@@ -615,22 +602,22 @@ public final class IngestModuleLoader {
* @param moduleClass
* @param pipelineType
*/
- private void addModuleToRawPipeline(Class<?> moduleClass, XmlPipelineRaw.PIPELINE_TYPE pipelineType) throws IngestModuleLoaderException {
+ private void addModuleToRawPipeline(Class<?> moduleClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pipelineType) throws IngestModuleLoaderException {
String moduleLocation = moduleClass.getName();
- XmlModuleRaw modRaw = new XmlModuleRaw();
+ IngestModuleLoader.XmlModuleRaw modRaw = new IngestModuleLoader.XmlModuleRaw();
modRaw.arguments = ""; //default, no arguments
modRaw.location = moduleLocation;
modRaw.order = Integer.MAX_VALUE - (numModDiscovered++); //add to end
- modRaw.type = XmlModuleRaw.MODULE_TYPE.PLUGIN.toString();
+ modRaw.type = IngestModuleLoader.XmlModuleRaw.MODULE_TYPE.PLUGIN.toString();
modRaw.valid = false; //to be validated
//save the current numModDiscovered
ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered));
//find the pipeline of that type
- XmlPipelineRaw pipeline = null;
- for (XmlPipelineRaw rawP : this.pipelinesXML) {
+ IngestModuleLoader.XmlPipelineRaw pipeline = null;
+ for (IngestModuleLoader.XmlPipelineRaw rawP : this.pipelinesXML) {
if (rawP.type.equals(pipelineType.toString())) {
pipeline = rawP;
break;
@@ -681,27 +668,27 @@ public final class IngestModuleLoader {
Comment comment = doc.createComment("Saved by: " + getClass().getName()
+ " on: " + dateFormatter.format(System.currentTimeMillis()));
doc.appendChild(comment);
- Element rootEl = doc.createElement(XmlPipelineRaw.XML_PIPELINE_ROOT);
+ Element rootEl = doc.createElement(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_ROOT);
doc.appendChild(rootEl);
- for (XmlPipelineRaw rawP : this.pipelinesXML) {
- Element pipelineEl = doc.createElement(XmlPipelineRaw.XML_PIPELINE_EL);
- pipelineEl.setAttribute(XmlPipelineRaw.XML_PIPELINE_TYPE_ATTR, rawP.type);
+ for (IngestModuleLoader.XmlPipelineRaw rawP : this.pipelinesXML) {
+ Element pipelineEl = doc.createElement(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_EL);
+ pipelineEl.setAttribute(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_TYPE_ATTR, rawP.type);
rootEl.appendChild(pipelineEl);
- for (XmlModuleRaw rawM : rawP.modules) {
- Element moduleEl = doc.createElement(XmlModuleRaw.XML_MODULE_EL);
+ for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) {
+ Element moduleEl = doc.createElement(IngestModuleLoader.XmlModuleRaw.XML_MODULE_EL);
- moduleEl.setAttribute(XmlModuleRaw.XML_MODULE_LOC_ATTR, rawM.location);
- moduleEl.setAttribute(XmlModuleRaw.XML_MODULE_TYPE_ATTR, rawM.type);
- moduleEl.setAttribute(XmlModuleRaw.XML_MODULE_ORDER_ATTR, Integer.toString(rawM.order));
- moduleEl.setAttribute(XmlModuleRaw.XML_MODULE_TYPE_ATTR, rawM.type);
+ moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_LOC_ATTR, rawM.location);
+ moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_TYPE_ATTR, rawM.type);
+ moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_ORDER_ATTR, Integer.toString(rawM.order));
+ moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_TYPE_ATTR, rawM.type);
pipelineEl.appendChild(moduleEl);
}
}
- saveDoc(doc);
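+ // Write the pipeline document through the shared XMLUtil helper, which handles encoding and pretty-printing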
+ XMLUtil.saveDoc(IngestModuleLoader.class, absFilePath, ENCODING, doc);
logger.log(Level.INFO, "Pipeline configuration saved to: " + this.absFilePath);
} catch (ParserConfigurationException e) {
logger.log(Level.SEVERE, "Error saving pipeline config XML: can't initialize parser.", e);
@@ -709,39 +696,6 @@ public final class IngestModuleLoader {
}
- private boolean saveDoc(final Document doc) {
- TransformerFactory xf = TransformerFactory.newInstance();
- xf.setAttribute("indent-number", new Integer(1));
- boolean success = false;
- try {
- Transformer xformer = xf.newTransformer();
- xformer.setOutputProperty(OutputKeys.METHOD, "xml");
- xformer.setOutputProperty(OutputKeys.INDENT, "yes");
- xformer.setOutputProperty(OutputKeys.ENCODING, ENCODING);
- xformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
- xformer.setOutputProperty(OutputKeys.VERSION, "1.0");
- File file = new File(this.absFilePath);
- FileOutputStream stream = new FileOutputStream(file);
- Result out = new StreamResult(new OutputStreamWriter(stream, ENCODING));
- xformer.transform(new DOMSource(doc), out);
- stream.flush();
- stream.close();
- success = true;
-
- } catch (UnsupportedEncodingException e) {
- logger.log(Level.SEVERE, "Should not happen", e);
- } catch (TransformerConfigurationException e) {
- logger.log(Level.SEVERE, "Error writing pipeline config XML", e);
- } catch (TransformerException e) {
- logger.log(Level.SEVERE, "Error writing pipeline config XML", e);
- } catch (FileNotFoundException e) {
- logger.log(Level.SEVERE, "Error writing pipeline config XML: cannot write to file: " + this.absFilePath, e);
- } catch (IOException e) {
- logger.log(Level.SEVERE, "Error writing pipeline config XML: cannot write to file: " + this.absFilePath, e);
- }
- return success;
- }
-
/**
* Instantiate valid pipeline and modules and store the module object
* references
@@ -762,24 +716,24 @@ public final class IngestModuleLoader {
validate();
- for (XmlPipelineRaw pRaw : pipelinesXML) {
+ for (IngestModuleLoader.XmlPipelineRaw pRaw : pipelinesXML) {
if (pRaw.valid == false) {
//skip invalid pipelines
continue;
}
//sort modules by order parameter, in case XML order is different
- Collections.sort(pRaw.modules, new Comparator<XmlModuleRaw>() {
+ Collections.sort(pRaw.modules, new Comparator<IngestModuleLoader.XmlModuleRaw>() {
@Override
- public int compare(XmlModuleRaw o1, XmlModuleRaw o2) {
+ public int compare(IngestModuleLoader.XmlModuleRaw o1, IngestModuleLoader.XmlModuleRaw o2) {
return Integer.valueOf(o1.order).compareTo(Integer.valueOf(o2.order));
}
});
//check pipelineType, add to right pipeline collection
- XmlPipelineRaw.PIPELINE_TYPE pType = XmlPipelineRaw.getPipelineType(pRaw.type);
+ IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pType = IngestModuleLoader.XmlPipelineRaw.getPipelineType(pRaw.type);
- for (XmlModuleRaw pMod : pRaw.modules) {
+ for (IngestModuleLoader.XmlModuleRaw pMod : pRaw.modules) {
try {
if (pMod.valid == false) {
//skip invalid modules
@@ -906,31 +860,6 @@ public final class IngestModuleLoader {
}
- private Document loadDoc() {
- DocumentBuilderFactory builderFactory =
- DocumentBuilderFactory.newInstance();
-
- Document ret = null;
-
-
- try {
- DocumentBuilder builder = builderFactory.newDocumentBuilder();
- ret = builder.parse(
- new FileInputStream(absFilePath));
- } catch (ParserConfigurationException e) {
- logger.log(Level.SEVERE, "Error loading pipeline configuration: can't initialize parser.", e);
-
- } catch (SAXException e) {
- logger.log(Level.SEVERE, "Error loading pipeline configuration: can't parse XML.", e);
-
- } catch (IOException e) {
- //error reading file
- logger.log(Level.SEVERE, "Error loading pipeline configuration: can't read file.", e);
-
- }
- return ret;
-
- }
/**
* Load XML into raw pipeline representation
@@ -938,7 +867,7 @@ public final class IngestModuleLoader {
* @throws IngestModuleLoaderException
*/
private void loadRawPipeline() throws IngestModuleLoaderException {
- final Document doc = loadDoc();
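+ // XMLUtil.loadDoc parses pipeline_config.xml and validates it against PipelineConfigSchema.xsd; null still means the file could not be parsed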
+ final Document doc = XMLUtil.loadDoc(IngestModuleLoader.class, absFilePath, XSDFILE);
if (doc == null) {
throw new IngestModuleLoaderException("Could not load pipeline config XML: " + this.absFilePath);
}
@@ -948,7 +877,7 @@ public final class IngestModuleLoader {
logger.log(Level.SEVERE, msg);
throw new IngestModuleLoaderException(msg);
}
- NodeList pipelineNodes = root.getElementsByTagName(XmlPipelineRaw.XML_PIPELINE_EL);
+ NodeList pipelineNodes = root.getElementsByTagName(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_EL);
int numPipelines = pipelineNodes.getLength();
if (numPipelines == 0) {
throw new IngestModuleLoaderException("No pipelines found in the pipeline configuration: " + absFilePath);
@@ -956,15 +885,15 @@ public final class IngestModuleLoader {
for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) {
//process pipelines
Element pipelineEl = (Element) pipelineNodes.item(pipelineNum);
- final String pipelineType = pipelineEl.getAttribute(XmlPipelineRaw.XML_PIPELINE_TYPE_ATTR);
+ final String pipelineType = pipelineEl.getAttribute(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_TYPE_ATTR);
logger.log(Level.INFO, "Found pipeline type: " + pipelineType);
- XmlPipelineRaw pipelineRaw = new XmlPipelineRaw();
+ IngestModuleLoader.XmlPipelineRaw pipelineRaw = new IngestModuleLoader.XmlPipelineRaw();
pipelineRaw.type = pipelineType;
this.pipelinesXML.add(pipelineRaw);
//process modules
- NodeList modulesNodes = pipelineEl.getElementsByTagName(XmlModuleRaw.XML_MODULE_EL);
+ NodeList modulesNodes = pipelineEl.getElementsByTagName(IngestModuleLoader.XmlModuleRaw.XML_MODULE_EL);
int numModules = modulesNodes.getLength();
if (numModules == 0) {
logger.log(Level.WARNING, "Pipeline: " + pipelineType + " has no modules defined.");
@@ -972,11 +901,11 @@ public final class IngestModuleLoader {
for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) {
//process modules
Element moduleEl = (Element) modulesNodes.item(moduleNum);
- final String moduleType = moduleEl.getAttribute(XmlModuleRaw.XML_MODULE_TYPE_ATTR);
- final String moduleOrder = moduleEl.getAttribute(XmlModuleRaw.XML_MODULE_ORDER_ATTR);
- final String moduleLoc = moduleEl.getAttribute(XmlModuleRaw.XML_MODULE_LOC_ATTR);
- final String moduleArgs = moduleEl.getAttribute(XmlModuleRaw.XML_MODULE_ARGS_ATTR);
- XmlModuleRaw module = new XmlModuleRaw();
+ final String moduleType = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_TYPE_ATTR);
+ final String moduleOrder = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_ORDER_ATTR);
+ final String moduleLoc = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_LOC_ATTR);
+ final String moduleArgs = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_ARGS_ATTR);
+ IngestModuleLoader.XmlModuleRaw module = new IngestModuleLoader.XmlModuleRaw();
module.arguments = moduleArgs;
module.location = moduleLoc;
try {
@@ -1073,8 +1002,8 @@ public final class IngestModuleLoader {
* @param s string equals to one of the types toString() representation
* @return matching type
*/
- static PIPELINE_TYPE getPipelineType(String s) throws IllegalArgumentException {
- PIPELINE_TYPE[] types = PIPELINE_TYPE.values();
+ static IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE getPipelineType(String s) throws IllegalArgumentException {
+ IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE[] types = IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.values();
for (int i = 0; i < types.length; ++i) {
if (types[i].toString().equals(s)) {
return types[i];
@@ -1086,7 +1015,7 @@ public final class IngestModuleLoader {
private static final String XML_PIPELINE_EL = "PIPELINE";
private static final String XML_PIPELINE_TYPE_ATTR = "type";
String type;
- List<XmlModuleRaw> modules = new ArrayList<XmlModuleRaw>();
+ List<IngestModuleLoader.XmlModuleRaw> modules = new ArrayList<IngestModuleLoader.XmlModuleRaw>();
boolean valid = false; // if passed validation
}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/PipelineConfigSchema.xsd b/Core/src/org/sleuthkit/autopsy/ingest/PipelineConfigSchema.xsd
new file mode 100644
index 0000000000..027337ad07
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/PipelineConfigSchema.xsd
@@ -0,0 +1,38 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportXML.java b/Core/src/org/sleuthkit/autopsy/report/ReportXML.java
index 77ad93e303..c5979a6669 100644
--- a/Core/src/org/sleuthkit/autopsy/report/ReportXML.java
+++ b/Core/src/org/sleuthkit/autopsy/report/ReportXML.java
@@ -20,12 +20,9 @@
*/
package org.sleuthkit.autopsy.report;
-import java.io.FileOutputStream;
import java.io.File;
-import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
-import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -33,12 +30,14 @@ import java.util.Map.Entry;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import java.util.regex.Pattern;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.commons.lang.StringEscapeUtils;
-import org.jdom.Comment;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.output.XMLOutputter;
+import org.w3c.dom.Element;
+import org.w3c.dom.Document;
+import org.w3c.dom.Comment;
import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.*;
@@ -47,7 +46,7 @@ import org.sleuthkit.datamodel.*;
*/
public class ReportXML implements ReportModule {
- public static Document xmldoc = new Document();
+ public static Document xmldoc;
private ReportConfiguration reportconfig;
private String xmlPath;
private static ReportXML instance = null;
@@ -75,41 +74,61 @@ public class ReportXML implements ReportModule {
Integer filesystemcount = currentCase.getRootObjectsCount();
Integer totalfiles = skCase.countFsContentType(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG);
Integer totaldirs = skCase.countFsContentType(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR);
- Element root = new Element("Case");
- xmldoc = new Document(root);
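+ // Build the report with the standard w3c DOM API so it can be written out through XMLUtil.saveDoc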
+ DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
+ Document ret = builder.newDocument();
+ Element root = ret.createElement("Case");
DateFormat datetimeFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss");
Date date = new Date();
String datetime = datetimeFormat.format(date);
String datenotime = dateFormat.format(date);
- Comment comment = new Comment("XML Report Generated by Autopsy 3 on " + datetime);
- root.addContent(comment);
+ Comment comment = ret.createComment("XML Report Generated by Autopsy 3 on " + datetime);
+ root.appendChild(comment);
//Create summary node involving how many of each type
- Element summary = new Element("Summary");
+ Element summary = ret.createElement("Summary");
if (IngestManager.getDefault().isIngestRunning()) {
- summary.addContent(new Element("Warning").setText("Report was run before ingest services completed!"));
+ Element warning = ret.createElement("Warning");
+ warning.setTextContent("Report was run before ingest services completed!");
+ summary.appendChild(warning);
}
- summary.addContent(new Element("Name").setText(caseName));
- summary.addContent(new Element("Total-Images").setText(imagecount.toString()));
- summary.addContent(new Element("Total-FileSystems").setText(filesystemcount.toString()));
- summary.addContent(new Element("Total-Files").setText(totalfiles.toString()));
- summary.addContent(new Element("Total-Directories").setText(totaldirs.toString()));
- root.addContent(summary);
+ Element name = ret.createElement("Name");
+ name.setTextContent(caseName);
+ summary.appendChild(name);
+
+ Element timages = ret.createElement("Total-Images");
+ timages.setTextContent(imagecount.toString());
+ summary.appendChild(timages);
+
+ Element tfilesys = ret.createElement("Total-FileSystems");
+ tfilesys.setTextContent(filesystemcount.toString());
+ summary.appendChild(tfilesys);
+
+ Element tfiles = ret.createElement("Total-Files");
+ tfiles.setTextContent(totalfiles.toString());
+ summary.appendChild(tfiles);
+
+ Element tdir = ret.createElement("Total-Directories");
+ tdir.setTextContent(totaldirs.toString());
+ summary.appendChild(tdir);
+
+ root.appendChild(summary);
//generate the nodes for each of the types so we can use them later
- Element nodeGen = new Element("General-Information");
- Element nodeWebBookmark = new Element("Web-Bookmarks");
- Element nodeWebCookie = new Element("Web-Cookies");
- Element nodeWebHistory = new Element("Web-History");
- Element nodeWebDownload = new Element("Web-Downloads");
- Element nodeRecentObjects = new Element("Recent-Documents");
- Element nodeTrackPoint = new Element("Track-Points");
- Element nodeInstalled = new Element("Installed-Programfiles");
- Element nodeKeyword = new Element("Keyword-Search-Hits");
- Element nodeHash = new Element("Hashset-Hits");
- Element nodeDevice = new Element("Attached-Devices");
- Element nodeEmail = new Element("Email-Messages");
- Element nodeWebSearch = new Element("Web-Search-Queries");
- Element nodeExif = new Element("Exif-Metadata");
+
+ Element nodeGen = ret.createElement("General-Information");
+ Element nodeWebBookmark = ret.createElement("Web-Bookmarks");
+ Element nodeWebCookie = ret.createElement("Web-Cookies");
+ Element nodeWebHistory = ret.createElement("Web-History");
+ Element nodeWebDownload = ret.createElement("Web-Downloads");
+ Element nodeRecentObjects =ret.createElement("Recent-Documents");
+ Element nodeTrackPoint = ret.createElement("Track-Points");
+ Element nodeInstalled = ret.createElement("Installed-Programfiles");
+ Element nodeKeyword = ret.createElement("Keyword-Search-Hits");
+ Element nodeHash = ret.createElement("Hashset-Hits");
+ Element nodeDevice = ret.createElement("Attached-Devices");
+ Element nodeEmail = ret.createElement("Email-Messages");
+ Element nodeWebSearch = ret.createElement("Web-Search-Queries");
+ Element nodeExif = ret.createElement("Exif-Metadata");
+
//remove bytes
Pattern INVALID_XML_CHARS = Pattern.compile("[^\\u0009\\u000A\\u000D\\u0020-\\uD7FF\\uE000-\\uFFFD\uD800\uDC00-\uDBFF\uDFFF]");
for (Entry> entry : report.entrySet()) {
@@ -117,7 +136,7 @@ public class ReportXML implements ReportModule {
break;
}
int cc = 0;
- Element artifact = new Element("Artifact");
+ Element artifact = ret.createElement("Artifact");
Long objId = entry.getKey().getObjectID();
Content cont = skCase.getContentById(objId);
Long filesize = cont.getSize();
@@ -133,14 +152,17 @@ public class ReportXML implements ReportModule {
if (ReportFilter.cancel == true) {
break;
}
- Element attribute = new Element("Attribute").setAttribute("Type", tempatt.getAttributeTypeDisplayName());
+ Element attribute = ret.createElement("Attribute");
+ attribute.setAttribute("Type", tempatt.getAttributeTypeDisplayName());
String tempvalue = tempatt.getValueString();
//INVALID_XML_CHARS.matcher(tempvalue).replaceAll("");
- Element value = new Element("Value").setText(StringEscapeUtils.escapeXml(tempvalue));
- attribute.addContent(value);
- Element context = new Element("Context").setText(StringEscapeUtils.escapeXml(tempatt.getContext()));
- attribute.addContent(context);
- artifact.addContent(attribute);
+ Element value = ret.createElement("Value");
+ value.setTextContent(StringEscapeUtils.escapeXml(tempvalue));
+ attribute.appendChild(value);
+ Element context = ret.createElement("Context");
+ context.setTextContent(StringEscapeUtils.escapeXml(tempatt.getContext()));
+ attribute.appendChild(context);
+ artifact.appendChild(attribute);
cc++;
}
@@ -148,71 +170,74 @@ public class ReportXML implements ReportModule {
//while (entry.getValue().iterator().hasNext())
// {
// }
- nodeGen.addContent(artifact);
+ nodeGen.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getTypeID()) {
- nodeWebBookmark.addContent(artifact);
+ nodeWebBookmark.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE.getTypeID()) {
- nodeWebCookie.addContent(artifact);
+ nodeWebCookie.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID()) {
- nodeWebHistory.addContent(artifact);
+ nodeWebHistory.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD.getTypeID()) {
- nodeWebDownload.addContent(artifact);
+ nodeWebDownload.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT.getTypeID()) {
- nodeRecentObjects.addContent(artifact);
+ nodeRecentObjects.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_TRACKPOINT.getTypeID()) {
- nodeTrackPoint.addContent(artifact);
+ nodeTrackPoint.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INSTALLED_PROG.getTypeID()) {
- nodeInstalled.addContent(artifact);
+ nodeInstalled.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
- nodeKeyword.addContent(artifact);
+ nodeKeyword.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
- nodeHash.addContent(artifact);
+ nodeHash.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID()) {
- nodeDevice.addContent(artifact);
+ nodeDevice.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()) {
- nodeEmail.addContent(artifact);
+ nodeEmail.appendChild(artifact);
}
if (entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY.getTypeID()) {
- nodeWebSearch.addContent(artifact);
+ nodeWebSearch.appendChild(artifact);
}
if(entry.getKey().getArtifactTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID()){
- nodeExif.addContent(artifact);
+ nodeExif.appendChild(artifact);
}
+
//end of master loop
}
//add them in the order we want them to the document
- root.addContent(nodeGen);
- root.addContent(nodeWebBookmark);
- root.addContent(nodeWebCookie);
- root.addContent(nodeWebHistory);
- root.addContent(nodeWebDownload);
- root.addContent(nodeRecentObjects);
- root.addContent(nodeTrackPoint);
- root.addContent(nodeInstalled);
- root.addContent(nodeKeyword);
- root.addContent(nodeHash);
- root.addContent(nodeDevice);
- root.addContent(nodeEmail);
- root.addContent(nodeWebSearch);
- root.addContent(nodeExif);
+ root.appendChild(nodeGen);
+ root.appendChild(nodeWebBookmark);
+ root.appendChild(nodeWebCookie);
+ root.appendChild(nodeWebHistory);
+ root.appendChild(nodeWebDownload);
+ root.appendChild(nodeRecentObjects);
+ root.appendChild(nodeTrackPoint);
+ root.appendChild(nodeInstalled);
+ root.appendChild(nodeKeyword);
+ root.appendChild(nodeHash);
+ root.appendChild(nodeDevice);
+ root.appendChild(nodeEmail);
+ root.appendChild(nodeWebSearch);
+ root.appendChild(nodeExif);
+ ret.appendChild(root);
+ xmldoc = ret;
//Export it the first time
xmlPath = currentCase.getCaseDirectory() + File.separator + "Reports" + File.separator + caseName + "-" + datenotime + ".xml";
@@ -227,18 +252,7 @@ public class ReportXML implements ReportModule {
@Override
public void save(String path) {
-
- try {
-
- FileOutputStream out = new FileOutputStream(path);
- XMLOutputter serializer = new XMLOutputter();
- serializer.output(xmldoc, out);
- out.flush();
- out.close();
- } catch (IOException e) {
- System.err.println(e);
- }
-
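+ // Serialize the report document with the shared XMLUtil helper (UTF-8, pretty-printed)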
+ XMLUtil.saveDoc(ReportXML.class, path, "UTF-8", xmldoc);
}
@Override
diff --git a/CoreLibs/ivy.xml b/CoreLibs/ivy.xml
index 19e09402cc..32fd5a84d2 100644
--- a/CoreLibs/ivy.xml
+++ b/CoreLibs/ivy.xml
@@ -21,8 +21,6 @@
-
-
diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbXML.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbXML.java
index 3935241138..9ddc638b9e 100644
--- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbXML.java
+++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbXML.java
@@ -19,38 +19,25 @@
package org.sleuthkit.autopsy.hashdatabase;
import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.filechooser.FileNameExtensionFilter;
-import org.sleuthkit.autopsy.coreutils.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Result;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerConfigurationException;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
+import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
+import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.autopsy.hashdatabase.HashDb.DBType;
import org.sleuthkit.datamodel.SleuthkitJNI;
import org.sleuthkit.datamodel.TskCoreException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
-import org.xml.sax.SAXException;
public class HashDbXML {
private static final String ROOT_EL = "hash_sets";
@@ -62,6 +49,7 @@ public class HashDbXML {
private static final String PATH_EL = "hash_set_path";
private static final String PATH_NUMBER_ATTR = "number";
private static final String CUR_HASHSETS_FILE_NAME = "hashsets.xml";
+ private static final String XSDFILE = "HashsetsSchema.xsd";
private static final String ENCODING = "UTF-8";
private static final String CUR_HASHSET_FILE = PlatformUtil.getUserConfigDirectory() + File.separator + CUR_HASHSETS_FILE_NAME;
private static final String SET_CALC = "hash_calculate";
@@ -263,7 +251,7 @@ public class HashDbXML {
setCalc.setAttribute(SET_VALUE, calcValue);
rootEl.appendChild(setCalc);
- success = saveDoc(doc);
+ success = XMLUtil.saveDoc(HashDbXML.class, xmlFile, ENCODING, doc);
} catch (ParserConfigurationException e) {
logger.log(Level.SEVERE, "Error saving hash sets: can't initialize parser.", e);
}
@@ -274,7 +262,7 @@ public class HashDbXML {
* load and parse XML, then dispose
*/
public boolean load() {
- final Document doc = loadDoc();
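+ // hashsets.xml is validated against HashsetsSchema.xsd while loading; validation problems are logged by XMLUtil rather than failing the load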
+ final Document doc = XMLUtil.loadDoc(HashDbXML.class, xmlFile, XSDFILE);
if (doc == null) {
return false;
}
@@ -432,65 +420,6 @@ public class HashDbXML {
File f = new File(xmlFile);
return f.exists() && f.canRead() && f.canWrite();
}
-
- private Document loadDoc() {
- DocumentBuilderFactory builderFactory =
- DocumentBuilderFactory.newInstance();
-
- Document ret = null;
-
-
- try {
- DocumentBuilder builder = builderFactory.newDocumentBuilder();
- ret = builder.parse(
- new FileInputStream(xmlFile));
- } catch (ParserConfigurationException e) {
- logger.log(Level.SEVERE, "Error loading hash sets: can't initialize parser.", e);
-
- } catch (SAXException e) {
- logger.log(Level.SEVERE, "Error loading hash sets: can't parse XML.", e);
-
- } catch (IOException e) {
- //error reading file
- logger.log(Level.SEVERE, "Error loading hash sets: can't read file.", e);
-
- }
- return ret;
-
- }
-
- private boolean saveDoc(final Document doc) {
- TransformerFactory xf = TransformerFactory.newInstance();
- xf.setAttribute("indent-number", new Integer(1));
- boolean success = false;
- try {
- Transformer xformer = xf.newTransformer();
- xformer.setOutputProperty(OutputKeys.METHOD, "xml");
- xformer.setOutputProperty(OutputKeys.INDENT, "yes");
- xformer.setOutputProperty(OutputKeys.ENCODING, ENCODING);
- xformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
- xformer.setOutputProperty(OutputKeys.VERSION, "1.0");
- File file = new File(xmlFile);
- FileOutputStream stream = new FileOutputStream(file);
- Result out = new StreamResult(new OutputStreamWriter(stream, ENCODING));
- xformer.transform(new DOMSource(doc), out);
- stream.flush();
- stream.close();
- success = true;
-
- } catch (UnsupportedEncodingException e) {
- logger.log(Level.SEVERE, "Should not happen", e);
- } catch (TransformerConfigurationException e) {
- logger.log(Level.SEVERE, "Error writing hash sets XML", e);
- } catch (TransformerException e) {
- logger.log(Level.SEVERE, "Error writing hash sets XML", e);
- } catch (FileNotFoundException e) {
- logger.log(Level.SEVERE, "Error writing hash sets XML: cannot write to file: " + xmlFile, e);
- } catch (IOException e) {
- logger.log(Level.SEVERE, "Error writing hash sets XML: cannot write to file: " + xmlFile, e);
- }
- return success;
- }
}
diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashsetsSchema.xsd b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashsetsSchema.xsd
new file mode 100644
index 0000000000..ee97c7d824
--- /dev/null
+++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashsetsSchema.xsd
@@ -0,0 +1,49 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java
index 65f4337de2..e13a65be77 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java
@@ -18,13 +18,6 @@
*/
package org.sleuthkit.autopsy.keywordsearch;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.UnsupportedEncodingException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@@ -35,20 +28,12 @@ import java.util.logging.Level;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Result;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerConfigurationException;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
-import org.xml.sax.SAXException;
/**
* Manages reading and writing of keyword lists to user settings XML file keywords.xml
@@ -68,6 +53,7 @@ public class KeywordSearchListsXML extends KeywordSearchListsAbstract{
private static final String KEYWORD_SELECTOR_ATTR = "selector";
private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
private static final String ENCODING = "UTF-8";
+ private static final String XSDFILE = "KeywordsSchema.xsd";
private static final Logger logger = Logger.getLogger(KeywordSearchListsXML.class.getName());
private DateFormat dateFormatter;
@@ -132,7 +118,7 @@ public class KeywordSearchListsXML extends KeywordSearchListsAbstract{
rootEl.appendChild(listEl);
}
- success = saveDoc(doc);
+ success = XMLUtil.saveDoc(KeywordSearchListsXML.class, filePath, ENCODING, doc);
} catch (ParserConfigurationException e) {
logger.log(Level.SEVERE, "Error saving keyword list: can't initialize parser.", e);
}
@@ -144,7 +130,7 @@ public class KeywordSearchListsXML extends KeywordSearchListsAbstract{
*/
@Override
public boolean load() {
- final Document doc = loadDoc();
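+ // keywords.xml is checked against KeywordsSchema.xsd as it is read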
+ final Document doc = XMLUtil.loadDoc(KeywordSearchListsXML.class, filePath, XSDFILE);
if (doc == null) {
return false;
}
@@ -212,63 +198,4 @@ public class KeywordSearchListsXML extends KeywordSearchListsAbstract{
}
return true;
}
-
- private Document loadDoc() {
- DocumentBuilderFactory builderFactory =
- DocumentBuilderFactory.newInstance();
-
- Document ret = null;
-
-
- try {
- DocumentBuilder builder = builderFactory.newDocumentBuilder();
- ret = builder.parse(
- new FileInputStream(filePath));
- } catch (ParserConfigurationException e) {
- logger.log(Level.SEVERE, "Error loading keyword list: can't initialize parser.", e);
-
- } catch (SAXException e) {
- logger.log(Level.SEVERE, "Error loading keyword list: can't parse XML.", e);
-
- } catch (IOException e) {
- //error reading file
- logger.log(Level.SEVERE, "Error loading keyword list: can't read file.", e);
-
- }
- return ret;
-
- }
-
- private boolean saveDoc(final Document doc) {
- TransformerFactory xf = TransformerFactory.newInstance();
- xf.setAttribute("indent-number", new Integer(1));
- boolean success = false;
- try {
- Transformer xformer = xf.newTransformer();
- xformer.setOutputProperty(OutputKeys.METHOD, "xml");
- xformer.setOutputProperty(OutputKeys.INDENT, "yes");
- xformer.setOutputProperty(OutputKeys.ENCODING, ENCODING);
- xformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
- xformer.setOutputProperty(OutputKeys.VERSION, "1.0");
- File file = new File(filePath);
- FileOutputStream stream = new FileOutputStream(file);
- Result out = new StreamResult(new OutputStreamWriter(stream, ENCODING));
- xformer.transform(new DOMSource(doc), out);
- stream.flush();
- stream.close();
- success = true;
-
- } catch (UnsupportedEncodingException e) {
- logger.log(Level.SEVERE, "Should not happen", e);
- } catch (TransformerConfigurationException e) {
- logger.log(Level.SEVERE, "Error writing keyword lists XML", e);
- } catch (TransformerException e) {
- logger.log(Level.SEVERE, "Error writing keyword lists XML", e);
- } catch (FileNotFoundException e) {
- logger.log(Level.SEVERE, "Error writing keyword lists XML: cannot write to file: " + filePath, e);
- } catch (IOException e) {
- logger.log(Level.SEVERE, "Error writing keyword lists XML: cannot write to file: " + filePath, e);
- }
- return success;
- }
}
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordsSchema.xsd b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordsSchema.xsd
new file mode 100644
index 0000000000..901b06653c
--- /dev/null
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordsSchema.xsd
@@ -0,0 +1,58 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml
index c463d9df41..786192b39b 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml
@@ -1,3 +1,4 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java
index 047397f42d..fee36c6501 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java
@@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.recentactivity;
+
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
@@ -33,7 +34,9 @@ import javax.swing.JPanel;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
+import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.autopsy.ingest.IngestImageWorkerController;
+import org.sleuthkit.autopsy.ingest.IngestModuleAbstract;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.IngestServices;
@@ -65,13 +68,14 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
public final static String MODULE_VERSION = "1.0";
private String args;
- public static final String XMLFile = "SEUQAMappings.xml";
+ public static final String XMLFILE = "SEUQAMappings.xml";
+ private static final String XSDFILE = "SearchEngineSchema.xsd";
private static String[] searchEngineNames;
- private static SearchEngine[] engines;
+ private static SearchEngineURLQueryAnalyzer.SearchEngine[] engines;
private static Document xmlinput;
- private static final SearchEngine NullEngine = new SearchEngine("NONE", "NONE", new HashMap());
+ private static final SearchEngineURLQueryAnalyzer.SearchEngine NullEngine = new SearchEngineURLQueryAnalyzer.SearchEngine("NONE", "NONE", new HashMap());
//hide public constructor to prevent from instantiation by ingest module loader
@@ -123,10 +127,9 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
}
-
private void createEngines(){
NodeList nlist = xmlinput.getElementsByTagName("SearchEngine");
- SearchEngine[] listEngines = new SearchEngine[nlist.getLength()];
+ SearchEngineURLQueryAnalyzer.SearchEngine[] listEngines = new SearchEngineURLQueryAnalyzer.SearchEngine[nlist.getLength()];
for(int i = 0;i < nlist.getLength(); i++){
try{
NamedNodeMap nnm = nlist.item(i).getAttributes();
@@ -142,7 +145,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
}
}
- SearchEngine Se = new SearchEngine(EngineName, EnginedomainSubstring, splits);
+ SearchEngineURLQueryAnalyzer.SearchEngine Se = new SearchEngineURLQueryAnalyzer.SearchEngine(EngineName, EnginedomainSubstring, splits);
System.out.println("Search Engine: " + Se.toString());
listEngines[i] = Se;
}
@@ -162,7 +165,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
*
*/
- private static SearchEngine getSearchEngine(String domain){
+ private static SearchEngineURLQueryAnalyzer.SearchEngine getSearchEngine(String domain){
if (engines == null) {
return SearchEngineURLQueryAnalyzer.NullEngine;
}
@@ -196,7 +199,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
private String extractSearchEngineQuery(String url){
String x = "NoQuery";
- SearchEngine eng = getSearchEngine(url);
+ SearchEngineURLQueryAnalyzer.SearchEngine eng = getSearchEngine(url);
for(Map.Entry kvp : eng.getSplits()){
if(url.contains(kvp.getKey())){
x = split2(url, kvp.getValue());
@@ -257,7 +260,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
long last_accessed = -1;
//from tsk_files
FsContent fs = this.extractFiles(image, "select * from tsk_files where `obj_id` = '" + artifact.getObjectID() + "'").get(0); //associated file
- SearchEngine se = NullEngine;
+ SearchEngineURLQueryAnalyzer.SearchEngine se = NullEngine;
//from blackboard_attributes
Collection listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where `artifact_id` = " + artifact.getArtifactID());
getAttributes:
@@ -322,7 +325,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
if (engines == null) {
return total;
}
- for (SearchEngine se : engines) {
+ for (SearchEngineURLQueryAnalyzer.SearchEngine se : engines) {
total+= se.getEngineName() + " : "+ se.getTotal() + "\n";
}
return total;
@@ -330,6 +333,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
@Override
public void process(Image image, IngestImageWorkerController controller) {
+ logger.info("LAUNCHING COOKIES, ALL COOKIES ENGAGE.");
this.getURLs(image, controller);
logger.info("Search Engine stats: \n" + getTotals());
}
@@ -338,40 +342,40 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
public void init(IngestModuleInit initContext) {
try{
services = IngestServices.getDefault();
- if(PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFile)){
- init2();
- }
- else{
- logger.warning("Unable to find " + XMLFile);
- }
+ PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE);
+ init2();
}
-
catch(IOException e){
- logger.log(Level.WARNING, "Unable to find " + XMLFile , e);
+ logger.log(Level.WARNING, "Unable to find " + XMLFILE , e);
}
}
+
- private void init2(){
- try{
- String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFile;
- File f = new File(path);
- System.out.println("Load successful");
- DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
- DocumentBuilder db = dbf.newDocumentBuilder();
- Document xml = db.parse(f);
- xmlinput = xml;
- try{
- createEngines();
- getSearchEngineNames();
- }
- catch(Exception e){
- logger.log(Level.WARNING, "Unable to create Search Engines!", e);
- }
+ private void init2() {
+ try {
+ String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE;
+ File f = new File(path);
+ System.out.println("Load successful");
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ Document xml = db.parse(f);
+ xmlinput = xml;
+
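+ // Validate the mappings document against the schema; a mismatch is logged but the engines are still created from whatever parsed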
+ if (!XMLUtil.xmlIsValid(xml, SearchEngineURLQueryAnalyzer.class, XSDFILE)) {
+ logger.log(Level.WARNING, "Error loading Search Engines: could not validate against [" + XSDFILE + "], results may not be accurate.");
}
- catch(Exception e){
- logger.log(Level.WARNING, "Was not able to load SEUQAMappings.xml", e);
+ try {
+ createEngines();
+ getSearchEngineNames();
+ } catch (Exception e) {
+ logger.log(Level.WARNING, "Unable to create Search Engines!", e);
}
+ } catch (Exception e) {
+ logger.log(Level.WARNING, "Was not able to load SEUQAMappings.xml", e);
+ }
}
+
+
@Override
public void complete() {
@@ -414,8 +418,8 @@ public class SearchEngineURLQueryAnalyzer extends Extract implements IngestModul
}
@Override
- public ModuleType getType() {
- return ModuleType.Image;
+ public IngestModuleAbstract.ModuleType getType() {
+ return IngestModuleAbstract.ModuleType.Image;
}
@Override
diff --git a/thirdparty/crt/x86-32/10.0.40219.1/crt.zip b/thirdparty/crt/x86-32/10.0.40219.1/crt.zip
new file mode 100644
index 0000000000..450e13b8c7
Binary files /dev/null and b/thirdparty/crt/x86-32/10.0.40219.1/crt.zip differ