From d4ef8013ffc76b1ec10cb55a353bdf92bf01ee9d Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Thu, 10 Oct 2013 16:12:43 -0400 Subject: [PATCH 1/9] Added new ewfVerify ingest module --- ewfVerify/build.xml | 8 + ewfVerify/manifest.mf | 5 + ewfVerify/nbproject/build-impl.xml | 45 +++++ ewfVerify/nbproject/platform.properties | 120 ++++++++++++ ewfVerify/nbproject/project.properties | 2 + ewfVerify/nbproject/project.xml | 31 ++++ ewfVerify/nbproject/suite.properties | 1 + .../autopsy/ewfverify/Bundle.properties | 1 + .../ewfverify/EwfVerifyIngestModule.java | 173 ++++++++++++++++++ nbproject/project.properties | 95 +++++----- 10 files changed, 435 insertions(+), 46 deletions(-) create mode 100755 ewfVerify/build.xml create mode 100755 ewfVerify/manifest.mf create mode 100755 ewfVerify/nbproject/build-impl.xml create mode 100755 ewfVerify/nbproject/platform.properties create mode 100755 ewfVerify/nbproject/project.properties create mode 100755 ewfVerify/nbproject/project.xml create mode 100755 ewfVerify/nbproject/suite.properties create mode 100755 ewfVerify/src/org/sleuthkit/autopsy/ewfverify/Bundle.properties create mode 100755 ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java diff --git a/ewfVerify/build.xml b/ewfVerify/build.xml new file mode 100755 index 0000000000..a84a569408 --- /dev/null +++ b/ewfVerify/build.xml @@ -0,0 +1,8 @@ + + + + + + Builds, tests, and runs the project org.sleuthkit.autopsy.ewfverify. + + diff --git a/ewfVerify/manifest.mf b/ewfVerify/manifest.mf new file mode 100755 index 0000000000..bcbc512e12 --- /dev/null +++ b/ewfVerify/manifest.mf @@ -0,0 +1,5 @@ +Manifest-Version: 1.0 +OpenIDE-Module: org.sleuthkit.autopsy.ewfverify +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/ewfverify/Bundle.properties +OpenIDE-Module-Specification-Version: 1.0 + diff --git a/ewfVerify/nbproject/build-impl.xml b/ewfVerify/nbproject/build-impl.xml new file mode 100755 index 0000000000..98ad2ccc3b --- /dev/null +++ b/ewfVerify/nbproject/build-impl.xml @@ -0,0 +1,45 @@ + + + + + + + + + + + + + You must set 'suite.dir' to point to your containing module suite + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ewfVerify/nbproject/platform.properties b/ewfVerify/nbproject/platform.properties new file mode 100755 index 0000000000..e0bdd68b73 --- /dev/null +++ b/ewfVerify/nbproject/platform.properties @@ -0,0 +1,120 @@ +branding.token=autopsy +netbeans-plat-version=7.3.1 +suite.dir=${basedir} +nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version} +harness.dir=${nbplatform.active.dir}/harness +bootstrap.url=http://deadlock.netbeans.org/hudson/job/nbms-and-javadoc/lastStableBuild/artifact/nbbuild/netbeans/harness/tasks.jar +autoupdate.catalog.url=http://dlc.sun.com.edgesuite.net/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz +cluster.path=\ + ${nbplatform.active.dir}/harness:\ + ${nbplatform.active.dir}/java:\ + ${nbplatform.active.dir}/platform +disabled.modules=\ + org.apache.tools.ant.module,\ + org.netbeans.api.debugger.jpda,\ + org.netbeans.api.java,\ + org.netbeans.lib.nbjavac,\ + org.netbeans.libs.cglib,\ + org.netbeans.libs.javacapi,\ + org.netbeans.libs.javacimpl,\ + org.netbeans.libs.springframework,\ + org.netbeans.modules.ant.browsetask,\ + org.netbeans.modules.ant.debugger,\ + org.netbeans.modules.ant.freeform,\ + org.netbeans.modules.ant.grammar,\ + org.netbeans.modules.ant.kit,\ + org.netbeans.modules.beans,\ + org.netbeans.modules.classfile,\ + 
org.netbeans.modules.dbschema,\ + org.netbeans.modules.debugger.jpda,\ + org.netbeans.modules.debugger.jpda.ant,\ + org.netbeans.modules.debugger.jpda.kit,\ + org.netbeans.modules.debugger.jpda.projects,\ + org.netbeans.modules.debugger.jpda.ui,\ + org.netbeans.modules.debugger.jpda.visual,\ + org.netbeans.modules.findbugs.installer,\ + org.netbeans.modules.form,\ + org.netbeans.modules.form.binding,\ + org.netbeans.modules.form.j2ee,\ + org.netbeans.modules.form.kit,\ + org.netbeans.modules.form.nb,\ + org.netbeans.modules.form.refactoring,\ + org.netbeans.modules.hibernate,\ + org.netbeans.modules.hibernatelib,\ + org.netbeans.modules.hudson.ant,\ + org.netbeans.modules.hudson.maven,\ + org.netbeans.modules.i18n,\ + org.netbeans.modules.i18n.form,\ + org.netbeans.modules.j2ee.core.utilities,\ + org.netbeans.modules.j2ee.eclipselink,\ + org.netbeans.modules.j2ee.eclipselinkmodelgen,\ + org.netbeans.modules.j2ee.jpa.refactoring,\ + org.netbeans.modules.j2ee.jpa.verification,\ + org.netbeans.modules.j2ee.metadata,\ + org.netbeans.modules.j2ee.metadata.model.support,\ + org.netbeans.modules.j2ee.persistence,\ + org.netbeans.modules.j2ee.persistence.kit,\ + org.netbeans.modules.j2ee.persistenceapi,\ + org.netbeans.modules.java.api.common,\ + org.netbeans.modules.java.debug,\ + org.netbeans.modules.java.editor,\ + org.netbeans.modules.java.editor.lib,\ + org.netbeans.modules.java.examples,\ + org.netbeans.modules.java.freeform,\ + org.netbeans.modules.java.guards,\ + org.netbeans.modules.java.helpset,\ + org.netbeans.modules.java.hints,\ + org.netbeans.modules.java.hints.declarative,\ + org.netbeans.modules.java.hints.declarative.test,\ + org.netbeans.modules.java.hints.legacy.spi,\ + org.netbeans.modules.java.hints.test,\ + org.netbeans.modules.java.hints.ui,\ + org.netbeans.modules.java.j2seplatform,\ + org.netbeans.modules.java.j2seproject,\ + org.netbeans.modules.java.kit,\ + org.netbeans.modules.java.lexer,\ + org.netbeans.modules.java.navigation,\ + org.netbeans.modules.java.platform,\ + org.netbeans.modules.java.preprocessorbridge,\ + org.netbeans.modules.java.project,\ + org.netbeans.modules.java.source,\ + org.netbeans.modules.java.source.ant,\ + org.netbeans.modules.java.source.queries,\ + org.netbeans.modules.java.source.queriesimpl,\ + org.netbeans.modules.java.sourceui,\ + org.netbeans.modules.java.testrunner,\ + org.netbeans.modules.javadoc,\ + org.netbeans.modules.javawebstart,\ + org.netbeans.modules.junit,\ + org.netbeans.modules.maven,\ + org.netbeans.modules.maven.checkstyle,\ + org.netbeans.modules.maven.coverage,\ + org.netbeans.modules.maven.embedder,\ + org.netbeans.modules.maven.grammar,\ + org.netbeans.modules.maven.graph,\ + org.netbeans.modules.maven.hints,\ + org.netbeans.modules.maven.indexer,\ + org.netbeans.modules.maven.junit,\ + org.netbeans.modules.maven.kit,\ + org.netbeans.modules.maven.model,\ + org.netbeans.modules.maven.osgi,\ + org.netbeans.modules.maven.persistence,\ + org.netbeans.modules.maven.refactoring,\ + org.netbeans.modules.maven.repository,\ + org.netbeans.modules.maven.search,\ + org.netbeans.modules.maven.spring,\ + org.netbeans.modules.projectimport.eclipse.core,\ + org.netbeans.modules.projectimport.eclipse.j2se,\ + org.netbeans.modules.refactoring.java,\ + org.netbeans.modules.spellchecker.bindings.java,\ + org.netbeans.modules.spring.beans,\ + org.netbeans.modules.testng,\ + org.netbeans.modules.testng.ant,\ + org.netbeans.modules.testng.maven,\ + org.netbeans.modules.websvc.jaxws21,\ + org.netbeans.modules.websvc.jaxws21api,\ + 
org.netbeans.modules.websvc.saas.codegen.java,\ + org.netbeans.modules.xml.jaxb,\ + org.netbeans.modules.xml.tools.java,\ + org.netbeans.spi.java.hints + diff --git a/ewfVerify/nbproject/project.properties b/ewfVerify/nbproject/project.properties new file mode 100755 index 0000000000..b0194c4977 --- /dev/null +++ b/ewfVerify/nbproject/project.properties @@ -0,0 +1,2 @@ +javac.source=1.7 +javac.compilerargs=-Xlint -Xlint:-serial diff --git a/ewfVerify/nbproject/project.xml b/ewfVerify/nbproject/project.xml new file mode 100755 index 0000000000..ae8089cea0 --- /dev/null +++ b/ewfVerify/nbproject/project.xml @@ -0,0 +1,31 @@ + + + org.netbeans.modules.apisupport.project + + + org.sleuthkit.autopsy.ewfverify + + + + org.sleuthkit.autopsy.core + + + + 9 + 7.0 + + + + org.sleuthkit.autopsy.corelibs + + + + 3 + 1.1 + + + + + + + diff --git a/ewfVerify/nbproject/suite.properties b/ewfVerify/nbproject/suite.properties new file mode 100755 index 0000000000..364e160e16 --- /dev/null +++ b/ewfVerify/nbproject/suite.properties @@ -0,0 +1 @@ +suite.dir=${basedir}/.. diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/Bundle.properties b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/Bundle.properties new file mode 100755 index 0000000000..3f1d100810 --- /dev/null +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/Bundle.properties @@ -0,0 +1 @@ +OpenIDE-Module-Name=ewfVerify diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java new file mode 100755 index 0000000000..891f6ae9ca --- /dev/null +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -0,0 +1,173 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.ewfverify; + + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.sleuthkit.autopsy.coreutils.StopWatch; +import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; +import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +import org.sleuthkit.autopsy.ingest.IngestModuleInit; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.PipelineContext; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; + +/** + * + * @author jwallace + */ +public class EwfVerifyIngestModule extends IngestModuleDataSource { + private static final String MODULE_NAME = "ewf Verify"; + private static final String MODULE_VERSION = "1.0"; + private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files."; + private static final long CHUNK_SIZE = 16 * 1024; + private IngestServices services; + private volatile boolean running = false; + private Image img; + private MessageDigest md; + private Logger logger; + private static int messageId = 0; + private volatile boolean cancelled = false; + private boolean verified = false; + + public EwfVerifyIngestModule() { + } + + @Override + public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { + try { + img = dataSource.getImage(); + } catch (TskCoreException ex) { + img = null; + logger.log(Level.SEVERE, "Failed to get image from Content.", ex); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + dataSource.getName())); + } + + if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { + img = null; + // TODO notify? + logger.log(Level.INFO, "Skipping non-ewf image " + img.getName()); + return; + } + + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Starting " + dataSource.getName())); + long size = img.getSize(); // size of the image + + // TODO handle size = 0 + + int totalChunks = (int) Math.ceil(size / CHUNK_SIZE); + System.out.println("TOTAL CHUNKS = " + totalChunks); + int read; + + // TODO find an appropriate size for this. + byte[] data; + controller.switchToDeterminate(totalChunks); + + running = true; + StopWatch timer = new StopWatch(); + timer.start(); + for (int i = 0; i < totalChunks; i++) { + if (cancelled) { + timer.stop(); + running = false; + return; + } + data = new byte[ (int) CHUNK_SIZE ]; + try { + read = img.read(data, i * CHUNK_SIZE, CHUNK_SIZE); + } catch (TskCoreException ex) { + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + img.getName())); + logger.log(Level.SEVERE, "Error reading from image: " + img.getName(), ex); + } + md.update(data); + controller.progress(i); + } + timer.stop(); + byte[] byteHash = md.digest(); + String hash = bytesToString(byteHash); + System.out.println("MD5 HASH: " + hash); + System.out.println("GENERATING HASH TOOK " + timer.getElapsedTimeSecs() + " SECONDS"); + running = false; + // TODO logic to check if it is verified. 
+ verified = true; + } + + @Override + public void init(IngestModuleInit initContext) { + services = IngestServices.getDefault(); + logger = services.getLogger(this); + try { + md = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException ex) { + logger.log(Level.WARNING, "Error getting md5 algorithm", ex); + throw new RuntimeException("Failed to get MD5 algorithm"); + } + cancelled = false; + running = false; + img = null; + } + + @Override + public void complete() { + logger.info("complete() " + this.getName()); + String msg = verified ? " verified." : " not verified."; + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, img.getName() + msg)); + } + + @Override + public void stop() { + cancelled = true; + } + + @Override + public String getName() { + return MODULE_NAME; + } + + @Override + public String getVersion() { + return MODULE_VERSION; + } + + @Override + public String getDescription() { + return MODULE_DESCRIPTION; + } + + @Override + public boolean hasBackgroundJobsRunning() { + return running; + } + + private String bytesToString(byte[] byteHash) { + StringBuilder sb = new StringBuilder(); + for (byte b : byteHash) { + sb.append(String.format("%02x", b&0xff)); + } + return sb.toString(); + } +} diff --git a/nbproject/project.properties b/nbproject/project.properties index e70fffba4b..c9722d329b 100644 --- a/nbproject/project.properties +++ b/nbproject/project.properties @@ -1,46 +1,49 @@ -app.icon=branding/core/core.jar/org/netbeans/core/startup/frame48.gif -### Title of the application -app.title=Autopsy -### lowercase version of above -app.name=autopsy -### if left unset, version will default to today's date -app.version=3.0.7 -### Build type isn't used at this point, but it may be useful -### Must be one of: DEVELOPMENT, RELEASE -build.type=RELEASE -#build.type=DEVELOPMENT -update_versions=false -#custom JVM options -#Note: can be higher on 64 bit systems, should be in sync with build.xml -run.args.extra=-J-Xms24m -J-XX:MaxPermSize=128M -J-Xverify:none -auxiliary.org-netbeans-modules-apisupport-installer.license-type=apache.v2 -auxiliary.org-netbeans-modules-apisupport-installer.os-linux=false -auxiliary.org-netbeans-modules-apisupport-installer.os-macosx=false -auxiliary.org-netbeans-modules-apisupport-installer.os-solaris=false -auxiliary.org-netbeans-modules-apisupport-installer.os-windows=true -auxiliary.org-netbeans-modules-apisupport-installer.pack200-enabled=false -branding.token=${app.name} -modules=\ - ${project.org.sleuthkit.autopsy.keywordsearch}:\ - ${project.org.sleuthkit.autopsy.hashdatabase}:\ - ${project.org.sleuthkit.autopsy.recentactivity}:\ - ${project.org.sleuthkit.autopsy.testing}:\ - ${project.org.sleuthkit.autopsy.thunderbirdparser}:\ - ${project.org.sleuthkit.autopsy.exifparser}:\ - ${project.org.sleuthkit.autopsy.core}:\ - ${project.org.sleuthkit.autopsy.corelibs}:\ - ${project.org.sleuthkit.autopsy.sevenzip}:\ - ${project.org.sleuthkit.autopsy.scalpel}:\ - ${project.org.sleuthkit.autopsy.timeline} -project.org.sleuthkit.autopsy.core=Core -project.org.sleuthkit.autopsy.corelibs=CoreLibs -project.org.sleuthkit.autopsy.hashdatabase=HashDatabase -project.org.sleuthkit.autopsy.keywordsearch=KeywordSearch -project.org.sleuthkit.autopsy.recentactivity=RecentActivity -project.org.sleuthkit.autopsy.testing=Testing -project.org.sleuthkit.autopsy.thunderbirdparser=thunderbirdparser -project.org.sleuthkit.autopsy.exifparser=ExifParser -project.org.sleuthkit.autopsy.sevenzip=SevenZip 
-project.org.sleuthkit.autopsy.scalpel=ScalpelCarver -project.org.sleuthkit.autopsy.timeline=Timeline - +app.icon=branding/core/core.jar/org/netbeans/core/startup/frame48.gif +### Title of the application +app.title=Autopsy +### lowercase version of above +app.name=autopsy +### if left unset, version will default to today's date +app.version=3.0.7 +### Build type isn't used at this point, but it may be useful +### Must be one of: DEVELOPMENT, RELEASE +build.type=RELEASE +project.org.sleuthkit.autopsy.ewfverify=EWFVerify +#build.type=DEVELOPMENT +update_versions=false +#custom JVM options +#Note: can be higher on 64 bit systems, should be in sync with build.xml +run.args.extra=-J-Xms24m -J-XX:MaxPermSize=128M -J-Xverify:none +auxiliary.org-netbeans-modules-apisupport-installer.license-type=apache.v2 +auxiliary.org-netbeans-modules-apisupport-installer.os-linux=false +auxiliary.org-netbeans-modules-apisupport-installer.os-macosx=false +auxiliary.org-netbeans-modules-apisupport-installer.os-solaris=false +auxiliary.org-netbeans-modules-apisupport-installer.os-windows=true +auxiliary.org-netbeans-modules-apisupport-installer.pack200-enabled=false +branding.token=${app.name} +modules=\ + ${project.org.sleuthkit.autopsy.keywordsearch}:\ + ${project.org.sleuthkit.autopsy.hashdatabase}:\ + ${project.org.sleuthkit.autopsy.recentactivity}:\ + ${project.org.sleuthkit.autopsy.testing}:\ + ${project.org.sleuthkit.autopsy.thunderbirdparser}:\ + ${project.org.sleuthkit.autopsy.exifparser}:\ + ${project.org.sleuthkit.autopsy.core}:\ + ${project.org.sleuthkit.autopsy.corelibs}:\ + ${project.org.sleuthkit.autopsy.sevenzip}:\ + ${project.org.sleuthkit.autopsy.scalpel}:\ + ${project.org.sleuthkit.autopsy.timeline}:\ + ${project.org.sleuthkit.autopsy.ewfverify} +project.org.sleuthkit.autopsy.core=Core +project.org.sleuthkit.autopsy.corelibs=CoreLibs +project.org.sleuthkit.autopsy.hashdatabase=HashDatabase +project.org.sleuthkit.autopsy.keywordsearch=KeywordSearch +project.org.sleuthkit.autopsy.recentactivity=RecentActivity +project.org.sleuthkit.autopsy.testing=Testing +project.org.sleuthkit.autopsy.thunderbirdparser=thunderbirdparser +project.org.sleuthkit.autopsy.exifparser=ExifParser +project.org.sleuthkit.autopsy.sevenzip=SevenZip +project.org.sleuthkit.autopsy.scalpel=ScalpelCarver +project.org.sleuthkit.autopsy.timeline=Timeline +project.org.sleuthkit.autopsy.ewfverify=ewfVerify + From d71dd5f9ad706bce84026291d813e06236786e2b Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Wed, 16 Oct 2013 13:08:52 -0400 Subject: [PATCH 2/9] Updated Image Details panel. --- .../autopsy/directorytree/Bundle.properties | 4 + .../ExplorerNodeActionVisitor.java | 7 ++ .../directorytree/ImageDetailsPanel.form | 111 ++++++++++++------ .../directorytree/ImageDetailsPanel.java | 110 ++++++++++++----- 4 files changed, 171 insertions(+), 61 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties b/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties index 481f8e930e..f6b85f40fd 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties @@ -61,3 +61,7 @@ TagAndCommentDialog.commentText.toolTipText=Enter an optional tag comment or lea TagAndCommentDialog.commentText.text= TagAndCommentDialog.commentLabel.text=Comment: TagAndCommentDialog.newTagButton.text=New Tag +ImageDetailsPanel.imgTotalSizeValue.text=... +ImageDetailsPanel.imgTotalSizeLabel.text=Total Size: +ImageDetailsPanel.imgHashValue.text=... 
+ImageDetailsPanel.imgHashLabel.text=Hash Value: diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ExplorerNodeActionVisitor.java b/Core/src/org/sleuthkit/autopsy/directorytree/ExplorerNodeActionVisitor.java index 72601900a9..a6c16c69fc 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/ExplorerNodeActionVisitor.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/ExplorerNodeActionVisitor.java @@ -249,6 +249,13 @@ public class ExplorerNodeActionVisitor extends ContentVisitor.Default +
@@ -17,31 +17,33 @@ + + + + + + - - - - - - - - - - - - - - - - - - - - - - + + + + + - + + + + + + + + + + + + + + @@ -52,24 +54,39 @@ - - - - - - - + + + + + + + + + + + + + + + + + + + + + + - + - + @@ -134,5 +151,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ImageDetailsPanel.java b/Core/src/org/sleuthkit/autopsy/directorytree/ImageDetailsPanel.java index eeadbee6d4..07241253e6 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/ImageDetailsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/ImageDetailsPanel.java @@ -55,6 +55,10 @@ class ImageDetailsPanel extends javax.swing.JPanel { imgTypeValue = new javax.swing.JLabel(); imgSectorSizeValue = new javax.swing.JLabel(); OKButton = new javax.swing.JButton(); + imgTotalSizeLabel = new javax.swing.JLabel(); + imgTotalSizeValue = new javax.swing.JLabel(); + imgHashLabel = new javax.swing.JLabel(); + imgHashValue = new javax.swing.JLabel(); imageInfoLabel.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N imageInfoLabel.setText(org.openide.util.NbBundle.getMessage(ImageDetailsPanel.class, "ImageDetailsPanel.imageInfoLabel.text")); // NOI18N @@ -73,30 +77,42 @@ class ImageDetailsPanel extends javax.swing.JPanel { OKButton.setText(org.openide.util.NbBundle.getMessage(ImageDetailsPanel.class, "ImageDetailsPanel.OKButton.text")); // NOI18N + imgTotalSizeLabel.setText(org.openide.util.NbBundle.getMessage(ImageDetailsPanel.class, "ImageDetailsPanel.imgTotalSizeLabel.text")); // NOI18N + + imgTotalSizeValue.setText(org.openide.util.NbBundle.getMessage(ImageDetailsPanel.class, "ImageDetailsPanel.imgTotalSizeValue.text")); // NOI18N + + imgHashLabel.setText(org.openide.util.NbBundle.getMessage(ImageDetailsPanel.class, "ImageDetailsPanel.imgHashLabel.text")); // NOI18N + + imgHashValue.setText(org.openide.util.NbBundle.getMessage(ImageDetailsPanel.class, "ImageDetailsPanel.imgHashValue.text")); // NOI18N + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() + .addGap(0, 68, Short.MAX_VALUE) + .addComponent(imageInfoLabel) + .addContainerGap(78, Short.MAX_VALUE)) + .addGroup(layout.createSequentialGroup() + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addGap(89, 89, 89) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(imgNameLabel) - .addComponent(imgTypeLabel) - .addComponent(imgSectorSizeLabel)) - .addGap(29, 29, 29) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(imgNameValue) - .addComponent(imgTypeValue) - .addComponent(imgSectorSizeValue))) - .addGroup(layout.createSequentialGroup() - .addGap(118, 118, 118) - .addComponent(OKButton, javax.swing.GroupLayout.PREFERRED_SIZE, 80, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addGroup(layout.createSequentialGroup() - .addGap(71, 71, 71) - .addComponent(imageInfoLabel))) - .addContainerGap(75, Short.MAX_VALUE)) + .addComponent(imgNameLabel) + .addComponent(imgTypeLabel) + .addComponent(imgSectorSizeLabel) + .addComponent(imgTotalSizeLabel) + .addComponent(imgHashLabel)) + .addGap(18, 18, 18) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + 
.addComponent(imgNameValue) + .addComponent(imgTypeValue) + .addComponent(imgSectorSizeValue) + .addComponent(imgTotalSizeValue) + .addComponent(imgHashValue)) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addGroup(layout.createSequentialGroup() + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(OKButton, javax.swing.GroupLayout.PREFERRED_SIZE, 80, javax.swing.GroupLayout.PREFERRED_SIZE) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -105,21 +121,33 @@ class ImageDetailsPanel extends javax.swing.JPanel { .addComponent(imageInfoLabel) .addGap(18, 18, 18) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) - .addGroup(layout.createSequentialGroup() - .addComponent(imgNameLabel) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(imgTypeLabel) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(imgSectorSizeLabel)) .addGroup(layout.createSequentialGroup() .addComponent(imgNameValue) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(imgTypeValue) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(imgSectorSizeValue))) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 29, Short.MAX_VALUE) + .addComponent(imgSectorSizeValue) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(imgTotalSizeValue) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(imgHashValue)) + .addGroup(layout.createSequentialGroup() + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) + .addGroup(layout.createSequentialGroup() + .addComponent(imgNameLabel) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(imgTypeLabel) + .addGap(25, 25, 25)) + .addGroup(layout.createSequentialGroup() + .addGap(50, 50, 50) + .addComponent(imgSectorSizeLabel))) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(imgTotalSizeLabel) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(imgHashLabel))) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 64, Short.MAX_VALUE) .addComponent(OKButton) - .addGap(22, 22, 22)) + .addContainerGap()) ); }// //GEN-END:initComponents @@ -127,10 +155,14 @@ class ImageDetailsPanel extends javax.swing.JPanel { // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton OKButton; private javax.swing.JLabel imageInfoLabel; + private javax.swing.JLabel imgHashLabel; + private javax.swing.JLabel imgHashValue; private javax.swing.JLabel imgNameLabel; private javax.swing.JLabel imgNameValue; private javax.swing.JLabel imgSectorSizeLabel; private javax.swing.JLabel imgSectorSizeValue; + private javax.swing.JLabel imgTotalSizeLabel; + private javax.swing.JLabel imgTotalSizeValue; private javax.swing.JLabel imgTypeLabel; private javax.swing.JLabel imgTypeValue; // End of variables declaration//GEN-END:variables @@ -155,14 +187,36 @@ class ImageDetailsPanel extends javax.swing.JPanel { } /** - * Sets the image size value on this panel. + * Sets the image sector size value on this panel. 
* * @param arg the new image size value */ public void setImgSectorSizeValue(String arg){ imgSectorSizeValue.setText(arg); } + + /** + * Sets the image size value on this panel. + * + * @param arg the new image size value + */ + public void setImgTotalSizeValue(String arg) { + imgTotalSizeValue.setText(arg); + } + + /** + * Sets the image hash value on this panel. + * + * @param arg the new image size value + */ + public void setImgHashValue(String arg) { + imgHashValue.setText(arg); + } + public void setVisibleHashInfo(boolean visible) { + imgHashLabel.setVisible(visible); + imgHashValue.setVisible(visible); + } /** * Sets the OK button action listener. * From 82a43bdf03359705372f0e2bf875e04377f5632c Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Wed, 16 Oct 2013 13:09:55 -0400 Subject: [PATCH 3/9] Added verification after processing data is completed, fixed cancellation bug. --- .../ewfverify/EwfVerifyIngestModule.java | 101 ++++++++++++------ 1 file changed, 66 insertions(+), 35 deletions(-) diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java index 891f6ae9ca..f7f8c8c1ab 100755 --- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -23,6 +23,8 @@ import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.logging.Level; import java.util.logging.Logger; +import javax.xml.bind.DatatypeConverter; +import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; import org.sleuthkit.autopsy.ingest.IngestMessage; @@ -33,6 +35,7 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.PipelineContext; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -44,82 +47,117 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource { private static final String MODULE_NAME = "ewf Verify"; private static final String MODULE_VERSION = "1.0"; private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files."; - private static final long CHUNK_SIZE = 16 * 1024; + private static final long DEFAULT_CHUNK_SIZE = 32 * 1024; private IngestServices services; private volatile boolean running = false; private Image img; + private String imgName; private MessageDigest md; - private Logger logger; + private static Logger logger = null; private static int messageId = 0; private volatile boolean cancelled = false; private boolean verified = false; + private SleuthkitCase skCase; public EwfVerifyIngestModule() { } @Override public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { + imgName = dataSource.getName(); try { img = dataSource.getImage(); } catch (TskCoreException ex) { img = null; logger.log(Level.SEVERE, "Failed to get image from Content.", ex); - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + dataSource.getName())); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, + "Error processing " + imgName)); + return; } + // Skip images that are not E01 if (img.getType() != 
TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { img = null; - // TODO notify? logger.log(Level.INFO, "Skipping non-ewf image " + img.getName()); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, + "Skipping non-ewf image " + imgName)); + return; + } + + // Get the hash stored in the E01 file from the database + String storedHash = ""; + if (skCase.imageHasHash(img)) { + try { + storedHash = skCase.getImageHash(img).toLowerCase(); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Failed to get stored hash from image " + imgName, ex); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, + "Error retrieving stored hash value from " + imgName)); + return; + } + } else { + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, + "Image " + imgName + " does not have stored hash.")); return; } - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Starting " + dataSource.getName())); - long size = img.getSize(); // size of the image + logger.log(Level.INFO, "Starting ewf verification of " + img.getName()); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, + "Starting " + imgName)); - // TODO handle size = 0 + long size = img.getSize(); + if (size == 0) { + logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried."); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, + "Error getting size of " + imgName + ". Image will not be processed.")); + } - int totalChunks = (int) Math.ceil(size / CHUNK_SIZE); - System.out.println("TOTAL CHUNKS = " + totalChunks); + // Libewf uses a sector size of 64 times the sector size, which is the + // motivation for using it here. + long chunkSize = 64 * img.getSsize(); + chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize; + + int totalChunks = (int) Math.ceil(size / chunkSize); + logger.log(Level.INFO, "Total chunks = " + totalChunks); int read; - // TODO find an appropriate size for this. byte[] data; controller.switchToDeterminate(totalChunks); running = true; - StopWatch timer = new StopWatch(); - timer.start(); + // Read in byte size chunks and update the hash value with the data. for (int i = 0; i < totalChunks; i++) { - if (cancelled) { - timer.stop(); + if (controller.isCancelled()) { running = false; return; } - data = new byte[ (int) CHUNK_SIZE ]; + data = new byte[ (int) chunkSize ]; try { - read = img.read(data, i * CHUNK_SIZE, CHUNK_SIZE); + read = img.read(data, i * chunkSize, chunkSize); } catch (TskCoreException ex) { services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + img.getName())); - logger.log(Level.SEVERE, "Error reading from image: " + img.getName(), ex); + logger.log(Level.SEVERE, "Error reading from image: " + imgName, ex); + return; } md.update(data); controller.progress(i); } - timer.stop(); - byte[] byteHash = md.digest(); - String hash = bytesToString(byteHash); - System.out.println("MD5 HASH: " + hash); - System.out.println("GENERATING HASH TOOK " + timer.getElapsedTimeSecs() + " SECONDS"); + + // Finish generating the hash and get it as a string value + String hash = DatatypeConverter.printHexBinary(md.digest()).toLowerCase(); +// String hash = bytesToString(md.digest()); + verified = hash.equals(storedHash); + logger.log(Level.INFO, "Calculated MD5 hash: " + hash); running = false; - // TODO logic to check if it is verified. 
- verified = true; } @Override public void init(IngestModuleInit initContext) { services = IngestServices.getDefault(); - logger = services.getLogger(this); + skCase = Case.getCurrentCase().getSleuthkitCase(); + if (logger == null) { + logger = services.getLogger(this); + } try { md = MessageDigest.getInstance("MD5"); } catch (NoSuchAlgorithmException ex) { @@ -129,18 +167,19 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource { cancelled = false; running = false; img = null; + imgName = ""; } @Override public void complete() { logger.info("complete() " + this.getName()); String msg = verified ? " verified." : " not verified."; - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, img.getName() + msg)); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg)); + logger.info(imgName + msg); } @Override public void stop() { - cancelled = true; } @Override @@ -162,12 +201,4 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource { public boolean hasBackgroundJobsRunning() { return running; } - - private String bytesToString(byte[] byteHash) { - StringBuilder sb = new StringBuilder(); - for (byte b : byteHash) { - sb.append(String.format("%02x", b&0xff)); - } - return sb.toString(); - } } From b5a8a07ff1fc094d83d6d329ce388f5f4913df2a Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Wed, 16 Oct 2013 16:04:54 -0400 Subject: [PATCH 4/9] Added better logging and reporting. --- .../ewfverify/EwfVerifyIngestModule.java | 70 ++++++++++++------- 1 file changed, 45 insertions(+), 25 deletions(-) diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java index f7f8c8c1ab..a0da52a378 100755 --- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -25,7 +25,6 @@ import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.bind.DatatypeConverter; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; @@ -40,7 +39,9 @@ import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** - * + * Data Source Ingest Module that generates a hash of an E01 image file and + * verifies it with the value stored in the image. 
+ * * @author jwallace */ public class EwfVerifyIngestModule extends IngestModuleDataSource { @@ -52,11 +53,13 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource { private volatile boolean running = false; private Image img; private String imgName; - private MessageDigest md; + private MessageDigest messageDigest; private static Logger logger = null; private static int messageId = 0; - private volatile boolean cancelled = false; private boolean verified = false; + private boolean skipped = false; + private String calculatedHash = ""; + private String storedHash = ""; private SleuthkitCase skCase; public EwfVerifyIngestModule() { @@ -78,17 +81,18 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource { // Skip images that are not E01 if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { img = null; - logger.log(Level.INFO, "Skipping non-ewf image " + img.getName()); + logger.log(Level.INFO, "Skipping non-ewf image " + imgName); services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Skipping non-ewf image " + imgName)); + skipped = true; return; } // Get the hash stored in the E01 file from the database - String storedHash = ""; if (skCase.imageHasHash(img)) { try { storedHash = skCase.getImageHash(img).toLowerCase(); + logger.info("Hash value stored in " + imgName + ": " + storedHash); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Failed to get stored hash from image " + imgName, ex); services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, @@ -135,19 +139,19 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource { try { read = img.read(data, i * chunkSize, chunkSize); } catch (TskCoreException ex) { - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + img.getName())); - logger.log(Level.SEVERE, "Error reading from image: " + imgName, ex); + String msg = "Error reading " + imgName + " at chunk " + i; + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg)); + logger.log(Level.SEVERE, msg, ex); return; } - md.update(data); + messageDigest.update(data); controller.progress(i); } // Finish generating the hash and get it as a string value - String hash = DatatypeConverter.printHexBinary(md.digest()).toLowerCase(); -// String hash = bytesToString(md.digest()); - verified = hash.equals(storedHash); - logger.log(Level.INFO, "Calculated MD5 hash: " + hash); + calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase(); + verified = calculatedHash.equals(storedHash); + logger.info("Hash calculated from " + imgName + ": " + calculatedHash); running = false; } @@ -155,31 +159,47 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource { public void init(IngestModuleInit initContext) { services = IngestServices.getDefault(); skCase = Case.getCurrentCase().getSleuthkitCase(); + running = false; + verified = false; + skipped = false; + img = null; + imgName = ""; + storedHash = ""; + calculatedHash = ""; + if (logger == null) { logger = services.getLogger(this); } - try { - md = MessageDigest.getInstance("MD5"); - } catch (NoSuchAlgorithmException ex) { - logger.log(Level.WARNING, "Error getting md5 algorithm", ex); - throw new RuntimeException("Failed to get MD5 algorithm"); + + if (messageDigest == null) { + try { + messageDigest = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException ex) { + logger.log(Level.WARNING, 
"Error getting md5 algorithm", ex); + throw new RuntimeException("Failed to get MD5 algorithm"); + } + } else { + messageDigest.reset(); } - cancelled = false; - running = false; - img = null; - imgName = ""; } @Override public void complete() { logger.info("complete() " + this.getName()); - String msg = verified ? " verified." : " not verified."; - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg)); - logger.info(imgName + msg); + if (skipped == false) { + String msg = verified ? " verified" : " not verified"; + String extra = "

EWF Verification Results for " + imgName + "";
 extra += "Result:" + msg + "";
 extra += "Calculated hash: " + calculatedHash + "";
 extra += "Stored hash: " + storedHash + "
  • "; + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra)); + logger.info(imgName + msg); + } } @Override public void stop() { + running = false; } @Override From 02257a09c095098a1a922538907407e60b4ae113 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Thu, 17 Oct 2013 09:30:35 -0400 Subject: [PATCH 5/9] Fixed error made during merge conflict. --- nbproject/project.properties | 1 + 1 file changed, 1 insertion(+) diff --git a/nbproject/project.properties b/nbproject/project.properties index 1d1b769dc3..7f25b50edc 100644 --- a/nbproject/project.properties +++ b/nbproject/project.properties @@ -44,4 +44,5 @@ project.org.sleuthkit.autopsy.exifparser=ExifParser project.org.sleuthkit.autopsy.sevenzip=SevenZip project.org.sleuthkit.autopsy.scalpel=ScalpelCarver project.org.sleuthkit.autopsy.timeline=Timeline +project.org.sleuthkit.autopsy.ewfverify=ewfVerify From fe72573c2ae6bcde6381d1f00590bb0051c9eccc Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Wed, 30 Oct 2013 15:51:33 -0400 Subject: [PATCH 6/9] Changed module display name and matched version number with autopsy's --- .../sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java index a0da52a378..256980d35e 100755 --- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -25,6 +25,7 @@ import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.bind.DatatypeConverter; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Version; import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; @@ -45,8 +46,8 @@ import org.sleuthkit.datamodel.TskData; * @author jwallace */ public class EwfVerifyIngestModule extends IngestModuleDataSource { - private static final String MODULE_NAME = "ewf Verify"; - private static final String MODULE_VERSION = "1.0"; + private static final String MODULE_NAME = "EWF Verify"; + private static final String MODULE_VERSION = Version.getVersion(); private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files."; private static final long DEFAULT_CHUNK_SIZE = 32 * 1024; private IngestServices services; From d414130570b6f6509a8418139a626f7df885b008 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Thu, 12 Dec 2013 14:53:45 -0500 Subject: [PATCH 7/9] line endings --- ExifParser/manifest.mf | 12 +- HashDatabase/manifest.mf | 14 +- docs/QuickStartGuide/index.html | 442 ++++++++++++++++---------------- docs/doxygen/needs_a_home.dox | 60 ++--- docs/doxygen/workflow.dox | 106 ++++---- 5 files changed, 317 insertions(+), 317 deletions(-) diff --git a/ExifParser/manifest.mf b/ExifParser/manifest.mf index dbf05fee2f..44ad288f51 100644 --- a/ExifParser/manifest.mf +++ b/ExifParser/manifest.mf @@ -1,6 +1,6 @@ -Manifest-Version: 1.0 -AutoUpdate-Show-In-Client: true -OpenIDE-Module: org.sleuthkit.autopsy.exifparser/3 -OpenIDE-Module-Implementation-Version: 9 -OpenIDE-Module-Layer: org/sleuthkit/autopsy/exifparser/layer.xml -OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/exifparser/Bundle.properties +Manifest-Version: 1.0 +AutoUpdate-Show-In-Client: true 
+OpenIDE-Module: org.sleuthkit.autopsy.exifparser/3 +OpenIDE-Module-Implementation-Version: 9 +OpenIDE-Module-Layer: org/sleuthkit/autopsy/exifparser/layer.xml +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/exifparser/Bundle.properties diff --git a/HashDatabase/manifest.mf b/HashDatabase/manifest.mf index ba201a294c..b8c105413c 100644 --- a/HashDatabase/manifest.mf +++ b/HashDatabase/manifest.mf @@ -1,7 +1,7 @@ -Manifest-Version: 1.0 -AutoUpdate-Show-In-Client: true -OpenIDE-Module: org.sleuthkit.autopsy.hashdatabase/3 -OpenIDE-Module-Implementation-Version: 9 -OpenIDE-Module-Layer: org/sleuthkit/autopsy/hashdatabase/layer.xml -OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/hashdatabase/Bundle.properties - +Manifest-Version: 1.0 +AutoUpdate-Show-In-Client: true +OpenIDE-Module: org.sleuthkit.autopsy.hashdatabase/3 +OpenIDE-Module-Implementation-Version: 9 +OpenIDE-Module-Layer: org/sleuthkit/autopsy/hashdatabase/layer.xml +OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/hashdatabase/Bundle.properties + diff --git a/docs/QuickStartGuide/index.html b/docs/QuickStartGuide/index.html index 7fe6d0867b..7bafa3b452 100644 --- a/docs/QuickStartGuide/index.html +++ b/docs/QuickStartGuide/index.html @@ -1,221 +1,221 @@ - - - - - - Autopsy 3 Quick Start Guide - - - -

    Autopsy 3 Quick Start Guide

    -

    June 2013

    -

    www.sleuthkit.org/autopsy/

    - - -

    Installation

    -

    - The current version of Autopsy 3 runs only on Microsoft Windows. - We have gotten it to run on other platforms, such as Linux and OS X, but we do not have it in a state that makes it easy to distribute and find the needed libraries. -

    -

    - The Windows installer will make a directory for Autopsy and place all of the needed files inside of it. - The installer includes all dependencies, including Sleuth Kit and Java. -

    -

    Note that Autopsy 3 is a complete rewrite from Autopsy 2 and none of this document is relevant to Autopsy 2.

    - -

    Adding a Data Source (image, local disk, logical files)

    -

    - Data sources are added to a case. A case can have a single data source or it can have multiple data sources if they are related. - Currently, a single report is generated for an entire case, so if you need to report on individual data sources, then you should use one data source per case. -

    - -

    Creating a Case

    -

    - To create a case, use the "Create New Case" option, either on the Welcome screen or from the "File" menu. - This will start the New Case Wizard. You will need to supply it with the name of the case and a directory to store the case results into. - You can optionally provide case numbers and other details. -

    - - -

    Adding a Data Source

    -

    - The next step is to add an input data source to the case. - The Add Data Source Wizard will start automatically after the case is created or you can manually start it from the "File" menu or toolbar. - You will need to choose the type of input data source to add (image, local disk or logical files and folders). - Next, supply it with the location of the source to add. -

    -
      -
    • For a disk image, browse to the first file in the set (Autopsy will find the rest of the files). Autopsy currently supports E01 and raw (dd) files. -
    • -
    • - For local disk, select one of the detected disks. - Autopsy will add the current view of the disk to the case (i.e. a snapshot of the meta-data). - However, the individual file content (not meta-data) does get updated with the changes made to the disk. - Note, you may need to run Autopsy as an Administrator to detect all disks. -
    • -
    • For logical files (a single file or folder of files), use the "Add" button to add one or more files or folders on your system to the case. Folders will be recursively added to the case.
    • -
    - - -

    - There are a couple of options in the wizard that will allow you to make the ingest process faster. - These typically deal with deleted files. - It will take longer if unallocated space is analyzed and the entire drive is searched for deleted files. - In some scenarios these recovery steps must be performed, while in others they are not needed and fast results on the allocated files are preferred. - Use these options to control how long the analysis will take. -

    - -

    - Autopsy will start to analyze these data sources and add them to the case and internal database. While it is doing that, it will prompt you to configure the Ingest Modules.

    - - -

    Ingest Modules

    -

    - You will next be prompted to configure the Ingest Modules. - Ingest modules will run in the background and perform specific tasks. - The Ingest Modules analyze files in a prioritized order so that files in a user's directory are analyzed before files in other folders. - Ingest modules can be developed by third parties (a condensed module skeleton follows this list); here are some of the standard ingest modules that come with Autopsy: -

    -
      -
    • Recent Activity - extracts user activity as saved by web browsers and the OS. Also runs regripper on the registry hive. -
    • -
    • Hash Lookup - uses hash databases to ignore known files from the NIST NSRL and flag known bad files. - Use the "Advanced" button to add and configure the hash databases to use during this process. - You will get updates on known bad file hits as the ingest occurs. You can later add hash databases - via the Tools -> Options menu in the main UI. You can download an index of the NIST NSRL from - here. -
    • -
    • Keyword Search - uses keyword lists to identify files with specific words in them. - You can select the keyword lists to search for automatically and you can create new lists using the "Advanced" button. - Note that with keyword search, you can always conduct searches after ingest has finished. - The keyword lists that you select during ingest will be searched for at periodic intervals and you will get the results in real-time. - You do not need to wait for all files to be indexed. -
    • -
    • Archive Extractor opens ZIP, RAR, and other archive formats and sends the files from those archive files back - through the pipelines for analysis.
    • -
    • Exif Image Parser extracts EXIF information from JPEG files and posts the results into the tree in the main UI.
    • -
    • Thunderbird Parser identifies Thunderbird MBOX files and extracts the e-mails from them.
    • -
    -
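    For readers interested in the third-party module API mentioned above, the shape of a data-source ingest module can be seen in the EwfVerifyIngestModule added in the first patch of this series. The sketch below condenses that class to its lifecycle methods; the class name, strings, and empty bodies are illustrative placeholders, and the method signatures are taken as they appear in the patch text.

        import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
        import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
        import org.sleuthkit.autopsy.ingest.IngestModuleInit;
        import org.sleuthkit.autopsy.ingest.PipelineContext;
        import org.sleuthkit.datamodel.Content;

        // Hypothetical skeleton of a data-source ingest module, condensed from
        // EwfVerifyIngestModule in this patch series.
        public class ExampleDataSourceIngestModule extends IngestModuleDataSource {

            @Override
            public void init(IngestModuleInit initContext) {
                // One-time setup before processing starts (get services, open resources).
            }

            @Override
            public void process(PipelineContext pipelineContext, Content dataSource,
                    IngestDataSourceWorkerController controller) {
                // Analyze the data source; report progress and honor cancellation
                // through the controller, and post findings as ingest messages.
            }

            @Override
            public void complete() {
                // Post a summary once processing of the data source has finished.
            }

            @Override
            public void stop() {
                // Stop any background work when the user cancels ingest.
            }

            @Override
            public String getName() { return "Example Module"; }           // placeholder

            @Override
            public String getVersion() { return "1.0"; }                    // placeholder

            @Override
            public String getDescription() { return "Example skeleton"; }   // placeholder

            @Override
            public boolean hasBackgroundJobsRunning() { return false; }
        }

    In this series, EwfVerifyIngestModule fills these methods in to hash an E01 image during process() and to post a verification summary in complete().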

    - When you select a module, you will have the option to change its settings. - For example, you can configure which keyword search lists to use during ingest and which hash databases to use. - Refer to the help system inside of Autopsy for details on configuring each module. -

    -

    - While ingest modules are running in the background, you will see a progress bar in the lower right. - You can use the GUI to review incoming results and perform other tasks while ingest is running. -

    - - -

    Analysis Basics

    - Autopsy Screenshot -

    You will start all of your analysis techniques from the tree on the left.

    -
      -
    • The Data Sources root node shows all data in the case.
    • -
        -
      • The individual image nodes show the file system structure of the disk images or local disks in the case.
      • -
      • The LogicalFileSet nodes show the logical files in the case.
      • -
      -
    • The Views node shows the same data from a file type or timeline perspective.
    • -
    • The Results node shows the output from the ingest modules.
    • -
    - -

    - When you select a node from the tree on the left, a list of files will be shown in the upper right. - You can use the Thumbnail view in the upper right to view the pictures. - When you select a file from the upper right, its contents will be shown in the lower right. - You can use the tabs in the lower right to view the text of the file, an image, or the hex data. -

    - -

    - If you are viewing files from the Views and Results nodes, you can right-click on a file to go to its file system location. - This feature is useful to see what else the user stored in the same folder as the file that you are currently looking at. - You can also right click on a file to extract it to the local system. -

    -

    - If you want to search for single keywords, then you can use the search box in the upper right of the program. - The results will be shown in a table in the upper right. -

    - -

    You can tag (or bookmark) arbitrary files so that you can more quickly find them later or so that you can include them specifically in a report.

    - -

    Ingest Inbox

    -

    - As you are going through the results in the tree, the ingest modules are running in the background. - The results are shown in the tree as soon as the ingest modules find them and report them. -

    -

    - The Ingest Inbox receives messages from the ingest modules as they find results. - You can open the inbox to see what has been recently found. - It keeps track of what messages you have read. -

    -

    - The intended use of this inbox is that you can focus on some data for a while and then check back on the inbox at a time that is convenient for you. - You can then see what else was found while you were focused on the previous task. - You may learn that a known bad file was found or that a file was found with a relevant keyword and then decide to focus on that for a while. -

    -

    When you select a message, you can then jump to the Results tree where more details can be found or jump to the file's location in the filesystem.

    - -

    Timeline (Beta)

    -

    There is a basic timeline view that you can access via the Tools -> Make Timeline feature. This will take a few minutes to create the timeline for analysis. Its features are still in development.

    - - -

    Example Use Cases

    -

    In this section, we will provide examples of how to do common analysis tasks.

    - -

    Web Artifacts

    -

    - If you want to view the user's recent web activity, make sure that the Recent Activity ingest module was enabled. - You can then go to the "Results " node in the tree on the left and then into the "Extracted Data" node. - There, you can find bookmarks, cookies, downloads, and history. -

    - -

    Known Bad Hash Files

    -

    - If you want to see if the data source had known bad files, make sure that the Hash Lookup ingest module was enabled. - You can then view the "Hashset Hits" section in the "Results" area of the tree on the left. - Note that hash lookup can take a long time, so this section will be updated as long as the ingest process is occurring. - Use the Ingest Inbox to keep track of what known bad files were recently found. -
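    Hash lookup works by comparing each file's MD5 digest against the configured hash databases. For reference, the same JDK classes used by the ewfVerify module in this series (MessageDigest and DatatypeConverter) are enough to compute such a digest; the sketch below is a minimal, self-contained example, and the file path and buffer size shown are illustrative.

        import java.io.InputStream;
        import java.nio.file.Files;
        import java.nio.file.Paths;
        import java.security.MessageDigest;
        import javax.xml.bind.DatatypeConverter;

        class Md5Example {
            /** Computes the lower-case hex MD5 of a file, reading it in chunks. */
            static String md5Of(String path) throws Exception {
                MessageDigest md = MessageDigest.getInstance("MD5");
                try (InputStream in = Files.newInputStream(Paths.get(path))) {
                    byte[] buffer = new byte[32 * 1024];   // illustrative chunk size
                    int read;
                    while ((read = in.read(buffer)) != -1) {
                        md.update(buffer, 0, read);        // hash only the bytes actually read
                    }
                }
                return DatatypeConverter.printHexBinary(md.digest()).toLowerCase();
            }
        }

    The ewfVerify module follows the same pattern over an Image rather than a local file, and compares the result against the hash stored in the E01 container.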

    -

    - When you find a known bad file in this interface, you may want to right click on the file to also view the file's original location. - You may find additional files that are relevant and stored in the same folder as this file. -

    - -

    Media: Images and Videos

    -

    - If you want to see all images and video on the disk image, then go to the "Views" section in the tree on the left and then "File Types". - Select either "Images" or "Videos". - You can use the thumbnail option in the upper right to view thumbnails of all images. -

    -
      -
    • Note: - We are working on making this more efficient when there are many images, and on a feature to display video thumbnails. -
    • -
    -

    You can select an image or video from the upper right and view the video or image in the lower right. Video will be played with sound.

    - - -

    Reporting

    -

    - A final report can be generated that will include all analysis results. - Use the "Generate Report" button to create this. - It will create an HTML or XLS report in the Reports folder of the case folder. - If you forgot the location of your case folder, you can determine it using the "Case Properties" option in the "File" menu. - There is also an option to export report files to a separate folder outside of the case folder. -

    - -
    -

    Copyright © 2012-2013 Basis Technology.

    -

    - This work is licensed under a - Creative Commons Attribution-Share Alike 3.0 United States License. -

    - - + + + + + + Autopsy 3 Quick Start Guide + + + +

    Autopsy 3 Quick Start Guide

    +

    June 2013

    +

    www.sleuthkit.org/autopsy/

    + + +

    Installation

    +

    + The current version of Autopsy 3 runs only on Microsoft Windows. + We have gotten it to run on other platforms, such as Linux and OS X, but it is not yet in a state where it is easy to distribute and to find the needed libraries. +

    +

    + The Windows installer will make a directory for Autopsy and place all of the needed files inside of it. + The installer includes all dependencies, including Sleuth Kit and Java. +

    +

    Note that Autopsy 3 is a complete rewrite from Autopsy 2 and none of this document is relevant to Autopsy 2.

    + +

    Adding a Data Source (image, local disk, logical files)

    +

    + Data sources are added to a case. A case can have a single data source, or it can have multiple data sources if they are related. + Currently, a single report is generated for an entire case, so if you need to report on individual data sources, then you should use one data source per case. +

    + +

    Creating a Case

    +

    + To create a case, use the "Create New Case" option on the Welcome screen or the "File" menu. + This will start the New Case Wizard. You will need to supply it with the name of the case and a directory in which to store the case results. + You can optionally provide case numbers and other details. +

    + + +

    Adding a Data Source

    +

    + The next step is to add an input data source to the case. + The Add Data Source Wizard will start automatically after the case is created, or you can start it manually from the "File" menu or toolbar. + You will need to choose the type of input data source to add (image, local disk, or logical files and folders). + Next, supply it with the location of the source to add. +

    +
      +
    • For a disk image, browse to the first file in the set (Autopsy will find the rest of the files). Autopsy currently supports E01 and raw (dd) files (see the format-signature sketch after this list). +
    • +
    • + For a local disk, select one of the detected disks. + Autopsy will add the current view of the disk to the case (i.e., a snapshot of the meta-data). + However, the individual file content (not the meta-data) does get updated with any changes made to the disk. + Note that you may need to run Autopsy as an Administrator to detect all disks. +
    • +
    • For logical files (a single file or folder of files), use the "Add" button to add one or more files or folders on your system to the case. Folders will be recursively added to the case.
    • +
    + + +
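    For illustration only, here is a minimal sketch of how the two supported formats differ on disk: an E01 (EWF) segment file starts with a short signature, while a raw (dd) image has no signature at all. This is a standalone snippet using only the standard Java library, not code from Autopsy or libewf; the class name is invented for the example, and the eight-byte signature value is stated here as an assumption rather than something taken from this guide. Run it with the path to the first segment file of an image.

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Arrays;

    public class ImageFormatSniffer {
        // Assumed EWF/E01 signature: "EVF" followed by 0x09 0x0D 0x0A 0xFF 0x00.
        private static final byte[] EWF_SIG = {0x45, 0x56, 0x46, 0x09, 0x0D, 0x0A, (byte) 0xFF, 0x00};

        public static void main(String[] args) throws IOException {
            if (args.length != 1) {
                System.err.println("usage: java ImageFormatSniffer <image-file>");
                return;
            }
            try (InputStream in = Files.newInputStream(Paths.get(args[0]))) {
                byte[] header = new byte[EWF_SIG.length];
                int read = in.read(header);
                boolean isEwf = read == EWF_SIG.length && Arrays.equals(header, EWF_SIG);
                // A raw (dd) image has no magic value, so "no signature" is all we can say about it.
                System.out.println(isEwf ? "Looks like an E01 (EWF) segment file" : "No EWF signature; possibly a raw (dd) image");
            }
        }
    }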

    + There are a couple of options in the wizard that will allow you to make the ingest process faster. + These typically deal with deleted files. + Ingest will take longer if unallocated space is analyzed and the entire drive is searched for deleted files. + In some scenarios these recovery steps must be performed; in others they are not needed and fast results on the allocated files are preferable. + Use these options to control how long the analysis will take. +

    + +

    + Autopsy will start to analyze these data sources and add them to the case and internal database. While it is doing that, it will prompt you to configure the Ingest Modules.

    + + +

    Ingest Modules

    +

    + You will next be prompted to configure the Ingest Modules. + Ingest modules run in the background and perform specific tasks. + The ingest modules analyze files in a prioritized order so that files in a user's directory are analyzed before files in other folders. + Ingest modules can be developed by third parties. Here are some of the standard ingest modules that come with Autopsy: +

    +
      +
    • Recent Activity + extracts user activity as saved by web browsers and the OS. Also runs regripper on the registry hive. +
    • +
    • Hash Lookup + uses hash databases to ignore known files from the NIST NSRL and flag known bad files. + Use the "Advanced" button to add and configure the hash databases to use during this process. + You will get updates on known bad file hits as ingest occurs. You can later add hash databases + via the Tools -> Options menu in the main UI. You can download an index of the NIST NSRL from + here. A simplified sketch of this kind of hash check appears after this list. +
    • +
    • Keyword Search + uses keyword lists to identify files with specific words in them. + You can select the keyword lists to search for automatically and you can create new lists using the "Advanced" button. + Note that with keyword search, you can always conduct searches after ingest has finished. + The keyword lists that you select during ingest will be searched for at periodic intervals and you will get the results in real-time. + You do not need to wait for all files to be indexed. +
    • +
    • Archive Extractor opens ZIP, RAR, and other archive formats and sends the files from those archive files back + through the pipelines for analysis.
    • +
    • Exif Image Parser extracts EXIF information from JPEG files and posts the results into the tree in the main UI.
    • +
    • Thunderbird Parser Identifies Thunderbird MBOX files and extracts the e-mails from them.
    • +
    +
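    To make the Hash Lookup idea concrete, here is a minimal, self-contained sketch of the kind of check it performs: hash each file with MD5 and look the digest up in a set of known bad values. It uses only the standard Java library; the class name, the file path argument, and the single hash value are placeholders, and this is not the Autopsy ingest module API.

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.security.MessageDigest;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class KnownBadCheck {
        // Computes the MD5 digest of a file as a lowercase hex string.
        static String md5Of(Path file) throws Exception {
            MessageDigest md = MessageDigest.getInstance("MD5");
            try (InputStream in = Files.newInputStream(file)) {
                byte[] buf = new byte[8192];
                for (int n = in.read(buf); n != -1; n = in.read(buf)) {
                    md.update(buf, 0, n);
                }
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest()) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        }

        public static void main(String[] args) throws Exception {
            // Placeholder "known bad" set; a real hash database (e.g. an NSRL-style index) is far larger.
            Set<String> knownBad = new HashSet<>(Arrays.asList("0123456789abcdef0123456789abcdef"));
            String digest = md5Of(Paths.get(args[0]));
            System.out.println(knownBad.contains(digest) ? "HIT: known bad file" : "No hit");
        }
    }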

    + When you select a module, you will have the option to change its settings. + For example, you can configure which keyword search lists to use during ingest and which hash databases to use. + Refer to the help system inside of Autopsy for details on configuring each module. +

    +

    + While ingest modules are running in the background, you will see a progress bar in the lower right. + You can use the GUI to review incoming results and perform other tasks while ingest is still running. +

    + + +

    Analysis Basics

    + Autopsy Screenshot +

    You will start all of your analysis techniques from the tree on the left.

    +
      +
    • The Data Sources root node shows all data in the case.
    • +
        +
      • The individual image nodes show the file system structure of the disk images or local disks in the case.
      • +
      • The LogicalFileSet nodes show the logical files in the case.
      • +
      +
    • The Views node shows the same data from a file type or timeline perspective.
    • +
    • The Results node shows the output from the ingest modules.
    • +
    + +

    + When you select a node from the tree on the left, a list of files will be shown in the upper right. + You can use the Thumbnail view in the upper right to view the pictures. + When you select a file from the upper right, its contents will be shown in the lower right. + You can use the tabs in the lower right to view the text of the file, an image, or the hex data. +
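    The hex tab shows raw file content. As a rough stand-in for what that view presents, the following snippet prints the first 256 bytes of a file as an offset/hex/ASCII dump; it is illustrative only and unrelated to Autopsy's own viewer code.

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class MiniHexDump {
        public static void main(String[] args) throws IOException {
            try (InputStream in = Files.newInputStream(Paths.get(args[0]))) {
                byte[] buf = new byte[256];
                int len = in.read(buf); // first chunk only; enough for a quick look
                for (int off = 0; off < len; off += 16) {
                    StringBuilder hex = new StringBuilder();
                    StringBuilder ascii = new StringBuilder();
                    for (int i = off; i < Math.min(off + 16, len); i++) {
                        hex.append(String.format("%02x ", buf[i]));
                        char c = (char) (buf[i] & 0xff);
                        ascii.append(c >= 0x20 && c < 0x7f ? c : '.');
                    }
                    System.out.printf("%08x  %-48s %s%n", off, hex, ascii);
                }
            }
        }
    }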

    + +

    + If you are viewing files from the Views and Results nodes, you can right-click on a file to go to its file system location. + This feature is useful to see what else the user stored in the same folder as the file that you are currently looking at. + You can also right click on a file to extract it to the local system. +

    +

    + If you want to search for single keywords, then you can use the search box in the upper right of the program. + The results will be shown in a table in the upper right. +
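    Conceptually, a keyword hit is just a file whose extracted text contains the search term. The sketch below is a deliberately naive stand-in for that idea, scanning the files under a directory for one keyword; Autopsy's actual keyword search indexes extracted text and is far more capable, so treat this purely as an illustration with invented names.

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;

    public class SimpleKeywordScan {
        // Recursively scans files under a directory for a case-insensitive keyword.
        static void scan(File f, String keyword) throws IOException {
            if (f.isDirectory()) {
                File[] children = f.listFiles();
                if (children == null) {
                    return;
                }
                for (File child : children) {
                    scan(child, keyword);
                }
            } else if (f.isFile()) {
                // Naive: treats every file as UTF-8 text; a real tool extracts text per file type and indexes it.
                String text = new String(Files.readAllBytes(f.toPath()), StandardCharsets.UTF_8);
                if (text.toLowerCase().contains(keyword.toLowerCase())) {
                    System.out.println("Keyword hit: " + f.getPath());
                }
            }
        }

        public static void main(String[] args) throws IOException {
            scan(new File(args[0]), args[1]);
        }
    }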

    + +

    You can tag (or bookmark) arbitrary files so that you can more quickly find them later or so that you can include them specifically in a report.

    + +

    Ingest Inbox

    +

    + As you are going through the results in the tree, the ingest modules are running in the background. + The results are shown in the tree as soon as the ingest modules find them and report them. +

    +

    + The Ingest Inbox receives messages from the ingest modules as they find results. + You can open the inbox to see what has been recently found. + It keeps track of what messages you have read. +

    +

    + The intended use of this inbox is that you can focus on some data for a while and then check back on the inbox at a time that is convenient for you. + You can then see what else was found while you were focused on the previous task. + You may learn that a known bad file was found, or that a file was found with a relevant keyword, and then decide to focus on that for a while. +

    +

    When you select a message, you can then jump to the Results tree where more details can be found or jump to the file's location in the filesystem.

    + +

    Timeline (Beta)

    +

    There is a basic timeline view that you can access via the Tools -> Make Timeline feature. This will take a few minutes to create the timeline for analysis. Its features are still in development.

    + + +

    Example Use Cases

    +

    In this section, we will provide examples of how to do common analysis tasks.

    + +

    Web Artifacts

    +

    + If you want to view the user's recent web activity, make sure that the Recent Activity ingest module was enabled. + You can then go to the "Results" node in the tree on the left and then into the "Extracted Data" node. + There, you can find bookmarks, cookies, downloads, and history. +

    + +

    Known Bad Hash Files

    +

    + If you want to see if the data source had known bad files, make sure that the Hash Lookup ingest module was enabled. + You can then view the "Hashset Hits" section in the "Results" area of the tree on the left. + Note that hash lookup can take a long time, so this section will continue to be updated for as long as the ingest process is running. + Use the Ingest Inbox to keep track of what known bad files were recently found. +

    +

    + When you find a known bad file in this interface, you may want to right click on the file to also view the file's original location. + You may find additional files that are relevant and stored in the same folder as this file. +

    + +

    Media: Images and Videos

    +

    + If you want to see all images and video on the disk image, then go to the "Views" section in the tree on the left and then "File Types". + Select either "Images" or "Videos". + You can use the thumbnail option in the upper right to view thumbnails of all images. +

    +
      +
    • Note: + We are working on making this more efficient when there are lots of images, and we are working on a feature to display video thumbnails. +
    • +
    +

    You can select an image or video from the upper right and view the video or image in the lower right. Video will be played with sound.

    + + +

    Reporting

    +

    + A final report can be generated that will include all analysis results. + Use the "Generate Report" button to create it. + It will create an HTML or XLS report in the Reports folder of the case folder. + If you have forgotten the location of your case folder, you can determine it using the "Case Properties" option in the "File" menu. + There is also an option to export report files to a separate folder outside of the case folder. +
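    As a toy illustration of what an HTML report boils down to (a generated page summarizing results), the snippet below writes a minimal report file into a Reports folder. The structure, class name, and file names here are invented for the example; the real reports are produced by Autopsy's reporting framework and contain much more detail.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.Arrays;
    import java.util.List;

    public class TinyHtmlReport {
        public static void main(String[] args) throws IOException {
            // Placeholder findings; a real report is built from the case database by the reporting framework.
            List<String> taggedFiles = Arrays.asList("/img1/Documents/plans.doc", "/img1/Pictures/IMG_0042.jpg");
            StringBuilder html = new StringBuilder("<html><body><h1>Example case summary</h1><ul>");
            for (String f : taggedFiles) {
                html.append("<li>").append(f).append("</li>");
            }
            html.append("</ul></body></html>");
            Path out = Paths.get("Reports", "example-report.html");
            Files.createDirectories(out.getParent());
            Files.write(out, html.toString().getBytes(StandardCharsets.UTF_8));
            System.out.println("Wrote " + out.toAbsolutePath());
        }
    }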

    + +
    +

    Copyright © 2012-2013 Basis Technology.

    +

    + This work is licensed under a + Creative Commons Attribution-Share Alike 3.0 United States License. +

    + + diff --git a/docs/doxygen/needs_a_home.dox b/docs/doxygen/needs_a_home.dox index c6badf6b36..b0a2b42d4f 100755 --- a/docs/doxygen/needs_a_home.dox +++ b/docs/doxygen/needs_a_home.dox @@ -1,30 +1,30 @@ - - - -The component is by default registered with the ingest manager as an ingest event listener. -The viewer first loads all the viewer-supported data currently in the blackboard when Autopsy starts. -During the ingest process the viewer receives events from ingest modules -(relayed by ingest manager) and it selectively refreshes parts of the tree providing real-time updates to the user. -When ingest is completed, the viewer responds to the final ingest data event generated by the ingest manager, -and performs a final refresh of all viewer-supported data in the blackboard. - - -Node content support capabilities are registered in the node's Lookup. - - - - -\section design_data_flow Data Flow - -\subsection design_data_flow_create Creating Nodes in DataExplorer - -Data flows between the UI zones using a NetBeans node. The DataExplorer modules create the NetBeans nodes. They query the SQLite database or do whatever they want to identify the set of files that are of interest. They create the NetBeans nodes based on Sleuthkit data model objects. See the org.sleuthkit.autopsy.datamodel package for more details on this. - -\subsection design_data_flow_toResult Getting Nodes to DataResult - -Each DataExplorer TopComponent is responsible for creating its own DataResult TopComponent to display its results. It can choose to re-use the same TopComponent for multiple searches (as DirectoryTree does) or it can choose to make a new one each time (as FileSearch does). The setNode() method on the DataResult object is used to set the root node to display. A dummy root node must be created as the parent if a parent does not already exist. - -The DataExplorer is responsible for setting the double-click and right-click actions associated with the node. The default single click action is to pass data to DataContent. To override this, you must create a new DataResultViewer instance that overrides the propertyChange() method. The DataExplorer adds actions to wrapping the node in a FilterNode variant. The FilterNode then defines the actions for the node by overriding the getPreferredAction() and getActions() methods. As an example, org.sleuthkit.autopsy.directorytree.DataResultFilterNode and org.sleuthkit.autopsy.directorytree.DataResultFilterChildren wraps the nodes that are passed over by the DirectoryTree DataExplorer. - -DataResult can send data back to its DataExplorer by making a custom action that looks up it's instance (DataExplorer.getInstance()). + + + +The component is by default registered with the ingest manager as an ingest event listener. +The viewer first loads all the viewer-supported data currently in the blackboard when Autopsy starts. +During the ingest process the viewer receives events from ingest modules +(relayed by ingest manager) and it selectively refreshes parts of the tree providing real-time updates to the user. +When ingest is completed, the viewer responds to the final ingest data event generated by the ingest manager, +and performs a final refresh of all viewer-supported data in the blackboard. + + +Node content support capabilities are registered in the node's Lookup. + + + + +\section design_data_flow Data Flow + +\subsection design_data_flow_create Creating Nodes in DataExplorer + +Data flows between the UI zones using a NetBeans node. 
The DataExplorer modules create the NetBeans nodes. They query the SQLite database or do whatever they want to identify the set of files that are of interest. They create the NetBeans nodes based on Sleuthkit data model objects. See the org.sleuthkit.autopsy.datamodel package for more details on this. + +\subsection design_data_flow_toResult Getting Nodes to DataResult + +Each DataExplorer TopComponent is responsible for creating its own DataResult TopComponent to display its results. It can choose to re-use the same TopComponent for multiple searches (as DirectoryTree does) or it can choose to make a new one each time (as FileSearch does). The setNode() method on the DataResult object is used to set the root node to display. A dummy root node must be created as the parent if a parent does not already exist. + +The DataExplorer is responsible for setting the double-click and right-click actions associated with the node. The default single click action is to pass data to DataContent. To override this, you must create a new DataResultViewer instance that overrides the propertyChange() method. The DataExplorer adds actions to wrapping the node in a FilterNode variant. The FilterNode then defines the actions for the node by overriding the getPreferredAction() and getActions() methods. As an example, org.sleuthkit.autopsy.directorytree.DataResultFilterNode and org.sleuthkit.autopsy.directorytree.DataResultFilterChildren wraps the nodes that are passed over by the DirectoryTree DataExplorer. + +DataResult can send data back to its DataExplorer by making a custom action that looks up it's instance (DataExplorer.getInstance()). diff --git a/docs/doxygen/workflow.dox b/docs/doxygen/workflow.dox index e7e3b9c882..c9bdf78486 100644 --- a/docs/doxygen/workflow.dox +++ b/docs/doxygen/workflow.dox @@ -1,53 +1,53 @@ -/*! \page workflow_page General Workflow and Design - -\section design_overview Overview -This section outlines the internal Autopsy design from the typical analysis work flow perspective. -This page is organized based on these phases: -- A Case is created. -- Images are added to the case and ingest modules are run. -- Results are manually reviewed and searched. -- Reports are generated. - -\section design_case Creating a Case -The first step in Autopsy work flow is creating a case. This is done in the org.sleuthkit.autopsy.casemodule package (see \ref casemodule_overview for details). This module contains the wizards needed and deals with how to store the information. You should not need to do much modifications in this package. But, you will want to use the org.sleuthkit.autopsy.casemodule.Case object to access all data related to this case. - - -\section design_image Adding an Image and Running Ingest Modules - -After case is created, one or more disk images can be added to the case. There is a wizard to guide that process and it is located in the org.sleuthkit.autopsy.casemodule package. Refer to the package section \ref casemodule_add_image for more details on the wizard. Most developers will not need to touch this code though. An important concept though is that adding an image to a case means that Autopsy uses The Sleuth Kit to enumerate all of the files in the file system and make a database entry for them in the embedded SQLite database that was created for the case. The database will be used for all further analysis. - -After image has been added to the case, the user can select one or more ingest modules to be executed on the image. 
Ingest modules focus on a specific type of analysis task and run in the background. They either analyze the entire disk image or individual files. The user will see the results from the modules in the result tree and in the ingest inbox. - -The org.sleuthkit.autopsy.ingest package provides the basic infrastructure for the ingest module management. - -If you want to develop a module that analyzes drive data, then this is probably the type of module that you want to build. See \ref mod_ingest_page for more details on making an ingest module. - - -\section design_view Viewing Results - -The UI has three main areas. The tree on the left-hand side, the result viewers in the upper right, and the content viewers in the lower right. Data passes between these areas by encapsulating them in Netbeans Node objects (see org.openide.nodes.Node). These allow Autopsy to generically handle all types of data. The org.sleuthkit.autopsy.datamodel package wraps the generic org.sleuthkit.datamodel Sleuth Kit objects as Netbeans Nodes. - -Nodes are modeled in a parent-child hierarchy with other nodes. All data within a Case is represented in a hierarchy with the disk images being one level below the case and volumes and such below the image. - -The tree on the left hand-side shows the analysis results. -Its contents are populated from the central database. -This is where you can browse the file system contents and see the results from the blackboard. - -The tree is implemented in the org.sleuthkit.autopsy.directorytree package. - -The area in the upper right is the result viewer area. When a node is selected from the tree, the node and its children are sent to this area. This area is used to view a set of nodes. The viewer is itself a framework with modules that display the data in different layouts. For example, the standard version comes with a table viewer and a thumbnail viewer. Refer to \ref mod_result_page for details on building a data result module. - -When an item is selected from the result viewer area, it is passed to the bottom right content viewers. It too is a framework with many modules that know how to show information about a specific file in different ways. For example, there are viewers that show the data in a hex dump format, extract the strings, and display pictures and movies. -See \ref mod_content_page for details on building new content viewers. - -\section design_report Report generation - -When ingest is complete, the user can generate reports. -There is a reporting framework to enable many different formats. Autopsy currently comes with generic html, xml and Excel reports. See the org.sleuthkit.autopsy.report package for details on the framework and -\ref mod_report_page for details on building a new report module. - - - - - -*/ +/*! \page workflow_page General Workflow and Design + +\section design_overview Overview +This section outlines the internal Autopsy design from the typical analysis work flow perspective. +This page is organized based on these phases: +- A Case is created. +- Images are added to the case and ingest modules are run. +- Results are manually reviewed and searched. +- Reports are generated. + +\section design_case Creating a Case +The first step in Autopsy work flow is creating a case. This is done in the org.sleuthkit.autopsy.casemodule package (see \ref casemodule_overview for details). This module contains the wizards needed and deals with how to store the information. You should not need to do much modifications in this package. 
But, you will want to use the org.sleuthkit.autopsy.casemodule.Case object to access all data related to this case. + + +\section design_image Adding an Image and Running Ingest Modules + +After case is created, one or more disk images can be added to the case. There is a wizard to guide that process and it is located in the org.sleuthkit.autopsy.casemodule package. Refer to the package section \ref casemodule_add_image for more details on the wizard. Most developers will not need to touch this code though. An important concept though is that adding an image to a case means that Autopsy uses The Sleuth Kit to enumerate all of the files in the file system and make a database entry for them in the embedded SQLite database that was created for the case. The database will be used for all further analysis. + +After image has been added to the case, the user can select one or more ingest modules to be executed on the image. Ingest modules focus on a specific type of analysis task and run in the background. They either analyze the entire disk image or individual files. The user will see the results from the modules in the result tree and in the ingest inbox. + +The org.sleuthkit.autopsy.ingest package provides the basic infrastructure for the ingest module management. + +If you want to develop a module that analyzes drive data, then this is probably the type of module that you want to build. See \ref mod_ingest_page for more details on making an ingest module. + + +\section design_view Viewing Results + +The UI has three main areas. The tree on the left-hand side, the result viewers in the upper right, and the content viewers in the lower right. Data passes between these areas by encapsulating them in Netbeans Node objects (see org.openide.nodes.Node). These allow Autopsy to generically handle all types of data. The org.sleuthkit.autopsy.datamodel package wraps the generic org.sleuthkit.datamodel Sleuth Kit objects as Netbeans Nodes. + +Nodes are modeled in a parent-child hierarchy with other nodes. All data within a Case is represented in a hierarchy with the disk images being one level below the case and volumes and such below the image. + +The tree on the left hand-side shows the analysis results. +Its contents are populated from the central database. +This is where you can browse the file system contents and see the results from the blackboard. + +The tree is implemented in the org.sleuthkit.autopsy.directorytree package. + +The area in the upper right is the result viewer area. When a node is selected from the tree, the node and its children are sent to this area. This area is used to view a set of nodes. The viewer is itself a framework with modules that display the data in different layouts. For example, the standard version comes with a table viewer and a thumbnail viewer. Refer to \ref mod_result_page for details on building a data result module. + +When an item is selected from the result viewer area, it is passed to the bottom right content viewers. It too is a framework with many modules that know how to show information about a specific file in different ways. For example, there are viewers that show the data in a hex dump format, extract the strings, and display pictures and movies. +See \ref mod_content_page for details on building new content viewers. + +\section design_report Report generation + +When ingest is complete, the user can generate reports. +There is a reporting framework to enable many different formats. 
Autopsy currently comes with generic html, xml and Excel reports. See the org.sleuthkit.autopsy.report package for details on the framework and +\ref mod_report_page for details on building a new report module. + + + + + +*/ From 183c333e280ac0bc4fb9ae97e11d6f0ba4039b69 Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Thu, 12 Dec 2013 14:54:33 -0500 Subject: [PATCH 8/9] line endings --- HashDatabase/nbproject/project.properties | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/HashDatabase/nbproject/project.properties b/HashDatabase/nbproject/project.properties index 88f75855cb..e633758f8a 100644 --- a/HashDatabase/nbproject/project.properties +++ b/HashDatabase/nbproject/project.properties @@ -1,6 +1,6 @@ -javac.source=1.7 -javac.compilerargs=-Xlint -Xlint:-serial -license.file=../LICENSE-2.0.txt -nbm.homepage=http://www.sleuthkit.org/autopsy/ -nbm.needs.restart=true -spec.version.base=1.3 +javac.source=1.7 +javac.compilerargs=-Xlint -Xlint:-serial +license.file=../LICENSE-2.0.txt +nbm.homepage=http://www.sleuthkit.org/autopsy/ +nbm.needs.restart=true +spec.version.base=1.3 From 7290b1f052db7077b51d5b69fee545cd5e5cb5ba Mon Sep 17 00:00:00 2001 From: Jeff Wallace Date: Thu, 12 Dec 2013 15:13:48 -0500 Subject: [PATCH 9/9] line endings --- .../DirectoryTreeFilterNode.java | 316 +++++++++--------- 1 file changed, 158 insertions(+), 158 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java index d71ea25216..cd3322fd81 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java @@ -1,159 +1,159 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.directorytree; - -import java.awt.event.ActionEvent; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.logging.Level; -import javax.swing.AbstractAction; -import javax.swing.Action; -import org.openide.nodes.FilterNode; -import org.openide.nodes.Node; -import org.openide.util.lookup.Lookups; -import org.openide.util.lookup.ProxyLookup; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.AbstractContentNode; -import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; -import org.sleuthkit.autopsy.ingest.IngestDialog; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.Directory; -import org.sleuthkit.datamodel.Image; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * This class sets the actions for the nodes in the directory tree and creates - * the children filter so that files and such are hidden from the tree. 
- * - */ -class DirectoryTreeFilterNode extends FilterNode { - - private static final Action collapseAll = new CollapseAction("Collapse All"); - private static final Logger logger = Logger.getLogger(DirectoryTreeFilterNode.class.getName()); - - /** - * the constructor - */ - DirectoryTreeFilterNode(Node arg, boolean createChildren) { - super(arg, DirectoryTreeFilterChildren.createInstance(arg, createChildren), - new ProxyLookup(Lookups.singleton(new OriginalNode(arg)), - arg.getLookup())); - } - - @Override - public String getDisplayName() { - final Node orig = getOriginal(); - - String name = orig.getDisplayName(); - - //do not show children counts for non content nodes - if (orig instanceof AbstractContentNode) { - //show only for file content nodes - AbstractFile file = getLookup().lookup(AbstractFile.class); - if (file != null) { - try { - final int numChildren = file.getChildrenCount(); - name = name + " (" + numChildren + ")"; - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error getting children count to display for file: " + file, ex); - } - - } - } - - return name; - } - - /** - * Right click action for the nodes in the directory tree. - * - * @param popup - * @return - */ - @Override - public Action[] getActions(boolean popup) { - List actions = new ArrayList(); - - final Content content = this.getLookup().lookup(Content.class); - if (content != null) { - actions.addAll(DirectoryTreeFilterNode.getDetailActions(content)); - - //extract dir action - Directory dir = this.getLookup().lookup(Directory.class); - if (dir != null) { - actions.add(ExtractAction.getInstance()); - } - - // file search action - final Image img = this.getLookup().lookup(Image.class); - if (img != null) { - actions.add(new FileSearchAction("Open File Search by Attributes")); - } - - //ingest action - actions.add(new AbstractAction("Run Ingest Modules") { - @Override - public void actionPerformed(ActionEvent e) { - final IngestDialog ingestDialog = new IngestDialog(); - ingestDialog.setContent(Collections.singletonList(content)); - ingestDialog.display(); - } - }); - } - - //check if delete actions should be added - final Node orig = getOriginal(); - //TODO add a mechanism to determine if DisplayableItemNode - if (orig instanceof DisplayableItemNode) { - actions.addAll(getDeleteActions((DisplayableItemNode) orig)); - } - - actions.add(collapseAll); - return actions.toArray(new Action[actions.size()]); - } - - private static List getDeleteActions(DisplayableItemNode original) { - List actions = new ArrayList(); - //actions.addAll(original.accept(getDeleteActionVisitor)); - return actions; - } - - private static List getDetailActions(Content c) { - List actions = new ArrayList(); - - actions.addAll(ExplorerNodeActionVisitor.getActions(c)); - - return actions; - } -} - -class OriginalNode { - - private Node original; - - OriginalNode(Node original) { - this.original = original; - } - - Node getNode() { - return original; - } +/* + * Autopsy Forensic Browser + * + * Copyright 2011 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.directorytree; + +import java.awt.event.ActionEvent; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.logging.Level; +import javax.swing.AbstractAction; +import javax.swing.Action; +import org.openide.nodes.FilterNode; +import org.openide.nodes.Node; +import org.openide.util.lookup.Lookups; +import org.openide.util.lookup.ProxyLookup; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datamodel.AbstractContentNode; +import org.sleuthkit.autopsy.datamodel.DisplayableItemNode; +import org.sleuthkit.autopsy.ingest.IngestDialog; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.Directory; +import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * This class sets the actions for the nodes in the directory tree and creates + * the children filter so that files and such are hidden from the tree. + * + */ +class DirectoryTreeFilterNode extends FilterNode { + + private static final Action collapseAll = new CollapseAction("Collapse All"); + private static final Logger logger = Logger.getLogger(DirectoryTreeFilterNode.class.getName()); + + /** + * the constructor + */ + DirectoryTreeFilterNode(Node arg, boolean createChildren) { + super(arg, DirectoryTreeFilterChildren.createInstance(arg, createChildren), + new ProxyLookup(Lookups.singleton(new OriginalNode(arg)), + arg.getLookup())); + } + + @Override + public String getDisplayName() { + final Node orig = getOriginal(); + + String name = orig.getDisplayName(); + + //do not show children counts for non content nodes + if (orig instanceof AbstractContentNode) { + //show only for file content nodes + AbstractFile file = getLookup().lookup(AbstractFile.class); + if (file != null) { + try { + final int numChildren = file.getChildrenCount(); + name = name + " (" + numChildren + ")"; + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error getting children count to display for file: " + file, ex); + } + + } + } + + return name; + } + + /** + * Right click action for the nodes in the directory tree. 
+ * + * @param popup + * @return + */ + @Override + public Action[] getActions(boolean popup) { + List actions = new ArrayList(); + + final Content content = this.getLookup().lookup(Content.class); + if (content != null) { + actions.addAll(DirectoryTreeFilterNode.getDetailActions(content)); + + //extract dir action + Directory dir = this.getLookup().lookup(Directory.class); + if (dir != null) { + actions.add(ExtractAction.getInstance()); + } + + // file search action + final Image img = this.getLookup().lookup(Image.class); + if (img != null) { + actions.add(new FileSearchAction("Open File Search by Attributes")); + } + + //ingest action + actions.add(new AbstractAction("Run Ingest Modules") { + @Override + public void actionPerformed(ActionEvent e) { + final IngestDialog ingestDialog = new IngestDialog(); + ingestDialog.setContent(Collections.singletonList(content)); + ingestDialog.display(); + } + }); + } + + //check if delete actions should be added + final Node orig = getOriginal(); + //TODO add a mechanism to determine if DisplayableItemNode + if (orig instanceof DisplayableItemNode) { + actions.addAll(getDeleteActions((DisplayableItemNode) orig)); + } + + actions.add(collapseAll); + return actions.toArray(new Action[actions.size()]); + } + + private static List getDeleteActions(DisplayableItemNode original) { + List actions = new ArrayList(); + //actions.addAll(original.accept(getDeleteActionVisitor)); + return actions; + } + + private static List getDetailActions(Content c) { + List actions = new ArrayList(); + + actions.addAll(ExplorerNodeActionVisitor.getActions(c)); + + return actions; + } +} + +class OriginalNode { + + private Node original; + + OriginalNode(Node original) { + this.original = original; + } + + Node getNode() { + return original; + } } \ No newline at end of file