Merge branch 'master' of github.com:sleuthkit/autopsy

Brian Carrier 2012-06-29 12:02:39 -04:00
commit 00ab04585f
25 changed files with 481 additions and 183 deletions

View File

@@ -111,47 +111,7 @@ public class AddImageErrorsDialog extends javax.swing.JDialog {
         errorsText.setText(errors);
     }

-    /**
-     * @param args the command line arguments
-     */
-    public static void main(String args[]) {
-        /* Set the Nimbus look and feel */
-        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
-        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
-         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
-         */
-        try {
-            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
-                if ("Nimbus".equals(info.getName())) {
-                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
-                    break;
-                }
-            }
-        } catch (ClassNotFoundException ex) {
-            java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
-        } catch (InstantiationException ex) {
-            java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
-        } catch (IllegalAccessException ex) {
-            java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
-        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
-            java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
-        }
-        //</editor-fold>
-
-        /* Create and display the dialog */
-        java.awt.EventQueue.invokeLater(new Runnable() {
-            public void run() {
-                AddImageErrorsDialog dialog = new AddImageErrorsDialog(new javax.swing.JFrame(), true);
-                dialog.addWindowListener(new java.awt.event.WindowAdapter() {
-                    @Override
-                    public void windowClosing(java.awt.event.WindowEvent e) {
-                        System.exit(0);
-                    }
-                });
-                dialog.setVisible(true);
-            }
-        });
-    }
-
     // Variables declaration - do not modify//GEN-BEGIN:variables
     private javax.swing.JButton closeButton;
     private javax.swing.JButton copyButton;

View File

@@ -99,6 +99,7 @@ final class AddImageVisualPanel2 extends JPanel {
     void setErrors(final String errors, boolean critical) {
+        crDbProgressBar.setValue(100); //always invoked when process completed
         if (critical) {
             progressLabel.setText("*Failed to add image (critical errors encountered). Click below to view the Add Image Log.");
         }

View File

@@ -147,6 +147,7 @@ public class DataResultFilterNode extends FilterNode{
         public List<Action> visit(ImageNode img) {
             List<Action> actions = new ArrayList<Action>();
             actions.add(new NewWindowViewAction("View in New Window", img));
+            actions.add(new FileSearchAction("Open File Search"));
             actions.addAll(ShowDetailActionVisitor.getActions(img.getLookup().lookup(Content.class)));
             return actions;
         }

View File

@@ -71,6 +71,7 @@ class DirectoryTreeFilterNode extends FilterNode {
         }
         final Image img = this.getLookup().lookup(Image.class);
         if (img != null) {
+            actions.add(new FileSearchAction("Open File Search"));
             actions.add(new AbstractAction("Restart Ingest Modules") {
                 @Override

View File

@@ -0,0 +1,40 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.directorytree;
+
+import java.awt.event.ActionEvent;
+import javax.swing.AbstractAction;
+import org.openide.util.Lookup;
+
+/**
+ *
+ * @author dfickling
+ */
+public class FileSearchAction extends AbstractAction{
+
+    public FileSearchAction(String title) {
+        super(title);
+    }
+
+    @Override
+    public void actionPerformed(ActionEvent e) {
+        FileSearchProvider searcher = Lookup.getDefault().lookup(FileSearchProvider.class);
+        searcher.showDialog();
+    }
+}

View File

@@ -0,0 +1,26 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.directorytree;
+
+/**
+ * Lookup interface for File Search (to deal with circular deps)
+ */
+public interface FileSearchProvider {
+    public void showDialog();
+}

View File

@@ -24,8 +24,11 @@ import java.beans.PropertyChangeListener;
 import org.openide.util.HelpCtx;
 import org.openide.util.actions.CallableSystemAction;
 import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.directorytree.FileSearchProvider;

-public final class FileSearchAction extends CallableSystemAction {
+public final class FileSearchAction extends CallableSystemAction implements FileSearchProvider{
+
+    private static FileSearchAction instance = null;

     FileSearchAction() {
         super();
@@ -43,6 +46,13 @@ public final class FileSearchAction extends CallableSystemAction {
         });
     }

+    public static FileSearchAction getDefault() {
+        if(instance == null){
+            instance = new FileSearchAction();
+        }
+        return instance;
+    }
+
     @Override
     public void actionPerformed(ActionEvent e) {
@@ -68,4 +78,9 @@ public final class FileSearchAction extends CallableSystemAction {
     protected boolean asynchronous() {
         return false;
     }
+
+    @Override
+    public void showDialog() {
+        performAction();
+    }
 }

View File

@@ -13,6 +13,11 @@
             <attr name="position" intvalue="3662"/>
         </file>
     </folder>
+    <file name="org-sleuthkit-autopsy-filesearch-FileSearchAction.instance">
+        <attr name="instanceOf" stringvalue="org.sleuthkit.autopsy.directorytree.FileSearchProvider"/>
+        <!--<attr name="instanceCreate" methodvalue="org.sleuthkit.autopsy.filesearch.FileSearchAction.getDefault"/>-->
+        <attr name="position" intvalue="250"/>
+    </file>
 </folder>
 <folder name="Menu">
     <folder name="Tools">

View File

@@ -7,7 +7,7 @@ HashDbSimplePanel.knownValLabel.text=-
 HashDbSimplePanel.notableValLabel.text=-
 HashDbMgmtPanel.addNotableButton.text=Add Known Bad Database
 HashDbMgmtPanel.removeNotableButton.text=Remove Selected
-HashDbSimplePanel.jLabel1.text=Known Bad Database(s):
+HashDbSimplePanel.jLabel1.text=Select known bad databases to use during ingest:
 HashDbSimplePanel.jLabel2.text=NSRL Database:
 HashDbMgmtPanel.nsrlNameLabel.text=Not Configured
 HashDbMgmtPanel.setNSRLButton.text=Change

View File

@@ -293,7 +293,8 @@ public class HashDbIngestService implements IngestServiceAbstractFile {
         ProcessResult ret = ProcessResult.UNKNOWN;
         boolean processFile = true;
-        if (fsContent.getKnown().equals(TskData.FileKnown.BAD)) {
+        if (fsContent.getSize() == 0
+                || fsContent.getKnown().equals(TskData.FileKnown.BAD)) {
             ret = ProcessResult.OK;
             processFile = false;
         }

View File

@@ -1,4 +1,4 @@
-<?xml version="1.1" encoding="UTF-8" ?>
+<?xml version="1.0" encoding="UTF-8" ?>

 <Form version="1.5" maxVersion="1.7" type="org.netbeans.modules.form.forminfo.JPanelFormInfo">
   <AuxValues>
@@ -16,18 +16,16 @@
   <Layout>
     <DimensionLayout dim="0">
       <Group type="103" groupAlignment="0" attributes="0">
-        <Group type="102" alignment="0" attributes="0">
-          <EmptySpace max="-2" attributes="0"/>
-          <Component id="jLabel2" min="-2" max="-2" attributes="0"/>
-          <EmptySpace pref="51" max="32767" attributes="0"/>
-        </Group>
-        <Group type="102" alignment="0" attributes="0">
-          <EmptySpace max="-2" attributes="0"/>
-          <Component id="jLabel1" min="-2" max="-2" attributes="0"/>
-          <EmptySpace max="32767" attributes="0"/>
-        </Group>
-        <Component id="jScrollPane1" alignment="1" pref="139" max="32767" attributes="1"/>
-        <Component id="jScrollPane2" alignment="0" pref="139" max="32767" attributes="0"/>
+        <Component id="jScrollPane1" alignment="1" pref="0" max="32767" attributes="1"/>
+        <Component id="jScrollPane2" alignment="0" pref="0" max="32767" attributes="0"/>
+        <Group type="102" attributes="0">
+          <EmptySpace max="-2" attributes="0"/>
+          <Group type="103" groupAlignment="0" attributes="0">
+            <Component id="jLabel2" alignment="0" min="-2" max="-2" attributes="0"/>
+            <Component id="jLabel1" alignment="0" min="-2" max="-2" attributes="0"/>
+          </Group>
+          <EmptySpace max="32767" attributes="0"/>
+        </Group>
       </Group>
     </DimensionLayout>
     <DimensionLayout dim="1">

View File

@@ -119,16 +119,14 @@ public class HashDbSimplePanel extends javax.swing.JPanel {
         this.setLayout(layout);
         layout.setHorizontalGroup(
             layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
-            .addGroup(layout.createSequentialGroup()
-                .addContainerGap()
-                .addComponent(jLabel2)
-                .addContainerGap(51, Short.MAX_VALUE))
-            .addGroup(layout.createSequentialGroup()
-                .addContainerGap()
-                .addComponent(jLabel1)
-                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
-            .addComponent(jScrollPane1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 139, Short.MAX_VALUE)
-            .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 139, Short.MAX_VALUE)
+            .addComponent(jScrollPane1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)
+            .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)
+            .addGroup(layout.createSequentialGroup()
+                .addContainerGap()
+                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+                    .addComponent(jLabel2)
+                    .addComponent(jLabel1))
+                .addContainerGap())
         );
         layout.setVerticalGroup(
             layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)

View File

@@ -28,27 +28,27 @@ enum IndexStatus {
     /**
     * The index and database both exist, and the index is older.
     */
-    INDEX_OUTDATED("Index is older than database."),
+    INDEX_OUTDATED("Index is older than database"),
     /**
     * The index and database both exist, and the index is not older.
     */
-    INDEX_CURRENT("Database has index."),
+    INDEX_CURRENT("Database and index exist"),
     /**
     * The index exists but the database does not.
     */
-    NO_DB("Only an index exists."),
+    NO_DB("Index exists (no database)"),
     /**
     * The database exists but the index does not.
     */
-    NO_INDEX("Database does not have index."),
+    NO_INDEX("Index does not exist"),
     /**
     * Neither the index nor the database exists.
     */
-    NONE("No index or database."),
+    NONE("No index or database"),
     /**
     * The index is currently being generated
     */
-    INDEXING("The index is currently being generated");
+    INDEXING("Index is currently being generated");

     private String message;

View File

@@ -71,7 +71,7 @@ class GetAllFilesContentVisitor extends GetFilesContentVisitor {
         StringBuilder queryB = new StringBuilder();
         queryB.append("SELECT * FROM tsk_files WHERE ( (fs_obj_id = ").append(fs.getId());
-        queryB.append(") OR (fs_obj_id = NULL) ) AND (size > 0)");
+        queryB.append(") OR (fs_obj_id = NULL) )");
         queryB.append(" AND ( (meta_type = ").append(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getMetaType());
         queryB.append(") OR (meta_type = ").append(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getMetaType());
         queryB.append( " AND (name != '.') AND (name != '..')");

View File

@@ -365,6 +365,8 @@ public class ExtractedContentViewer implements DataContentViewer {
             return false;
         }

+        if (content.getSize() == 0)
+            return false;

         final Server solrServer = KeywordSearch.getServer();
@@ -375,8 +377,6 @@ public class ExtractedContentViewer implements DataContentViewer {
         final long contentID = content.getId();

         try {
             return solrServer.queryIsIndexed(contentID);
         } catch (NoOpenCoreException ex) {
View File

@@ -139,13 +139,15 @@ public class Ingester {
     /**
     * Sends a file to Solr to have its content extracted and added to the
     * index. commit() should be called once you're done ingesting files.
+     * If the file is a directory or ingestContent is set to false, the file name is indexed only.
     *
-     * @param f File to ingest
+     * @param fsContent File to ingest
+     * @param ingestContent if true, index the file and the content, otherwise index metadata only
     * @throws IngesterException if there was an error processing a specific
     * file, but the Solr server is probably fine.
     */
-    void ingest(FsContent fsContent) throws IngesterException {
-        if (fsContent.isDir() ) {
+    void ingest(FsContent fsContent, boolean ingestContent) throws IngesterException {
+        if (fsContent.isDir() || ingestContent == false ) {
             ingest(new NullContentStream(fsContent), getContentFields(fsContent), 0);
         }
         else {
@@ -438,25 +440,20 @@ public class Ingester {
     }

     /**
-     * Determine if the file is ingestible/indexable by keyword search
+     * Determine if the file content is ingestible/indexable by keyword search
     * Ingestible abstract file is either a directory, or an allocated file with supported extensions.
     * Note: currently only checks by extension and abstract type, it does not check actual file content.
     * @param aFile
     * @return true if it is ingestible, false otherwise
     */
     static boolean isIngestible(AbstractFile aFile) {
-        boolean isIngestible = false;
         TSK_DB_FILES_TYPE_ENUM aType = aFile.getType();
-        if (aType.equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
-            || aType.equals(TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS))
-            return isIngestible;
+        if (! aType.equals(TSK_DB_FILES_TYPE_ENUM.FS) )
+            return false;

         FsContent fsContent = (FsContent) aFile;
+        if (fsContent.isDir())
+            //we index dir name, not content
+            return true;
+
+        boolean isIngestible = false;
         final String fileName = fsContent.getName();
         for (final String ext : ingestibleExtensions) {
             if (fileName.toLowerCase().endsWith(ext)) {

View File

@@ -56,6 +56,13 @@ public class Keyword {
         return isLiteral;
     }

+    @Override
+    public String toString() {
+        return "Keyword{" + "query=" + query + ", isLiteral=" + isLiteral + ", keywordType=" + keywordType + '}';
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (obj == null) {

View File

@@ -22,12 +22,12 @@
   <Group type="103" groupAlignment="0" attributes="0">
     <Group type="102" alignment="0" attributes="0">
       <Component id="filesIndexedLabel" min="-2" max="-2" attributes="0"/>
-      <EmptySpace type="unrelated" max="-2" attributes="0"/>
-      <Component id="filesIndexedValue" min="-2" pref="26" max="-2" attributes="0"/>
+      <EmptySpace max="-2" attributes="0"/>
+      <Component id="filesIndexedValue" min="-2" pref="104" max="-2" attributes="0"/>
     </Group>
     <Component id="skipNSRLCheckBox" alignment="0" min="-2" max="-2" attributes="0"/>
   </Group>
-  <EmptySpace max="32767" attributes="0"/>
+  <EmptySpace pref="165" max="32767" attributes="0"/>
 </Group>
 </Group>
 </DimensionLayout>
@@ -74,6 +74,9 @@
 <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
   <ResourceString bundle="org/sleuthkit/autopsy/keywordsearch/Bundle.properties" key="KeywordSearchConfigurationPanel2.filesIndexedValue.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
 </Property>
+<Property name="maximumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
+  <Dimension value="null"/>
+</Property>
 </Properties>
 </Component>
 <Component class="javax.swing.JSeparator" name="jSeparator1">

View File

@@ -71,6 +71,7 @@ public class KeywordSearchConfigurationPanel2 extends javax.swing.JPanel {
         filesIndexedLabel.setText(org.openide.util.NbBundle.getMessage(KeywordSearchConfigurationPanel2.class, "KeywordSearchConfigurationPanel2.filesIndexedLabel.text")); // NOI18N

         filesIndexedValue.setText(org.openide.util.NbBundle.getMessage(KeywordSearchConfigurationPanel2.class, "KeywordSearchConfigurationPanel2.filesIndexedValue.text")); // NOI18N
+        filesIndexedValue.setMaximumSize(null);

         javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
         this.setLayout(layout);
@@ -82,10 +83,10 @@ public class KeywordSearchConfigurationPanel2 extends javax.swing.JPanel {
             .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                 .addGroup(layout.createSequentialGroup()
                     .addComponent(filesIndexedLabel)
-                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
-                    .addComponent(filesIndexedValue, javax.swing.GroupLayout.PREFERRED_SIZE, 26, javax.swing.GroupLayout.PREFERRED_SIZE))
+                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
+                    .addComponent(filesIndexedValue, javax.swing.GroupLayout.PREFERRED_SIZE, 104, javax.swing.GroupLayout.PREFERRED_SIZE))
                 .addComponent(skipNSRLCheckBox))
-            .addContainerGap())
+            .addContainerGap(165, Short.MAX_VALUE))
         );
         layout.setVerticalGroup(
             layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
@@ -97,7 +98,7 @@ public class KeywordSearchConfigurationPanel2 extends javax.swing.JPanel {
                 .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                 .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                     .addComponent(filesIndexedLabel)
-                    .addComponent(filesIndexedValue))
+                    .addComponent(filesIndexedValue, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                 .addContainerGap(226, Short.MAX_VALUE))
         );
     }// </editor-fold>//GEN-END:initComponents

View File

@@ -38,6 +38,7 @@ import org.apache.solr.client.solrj.SolrServerException;
 import org.netbeans.api.progress.ProgressHandle;
 import org.netbeans.api.progress.ProgressHandleFactory;
 import org.openide.util.Cancellable;
+import org.openide.util.Exceptions;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.ingest.IngestManager;
 import org.sleuthkit.autopsy.ingest.IngestManagerProxy;
@@ -55,12 +56,11 @@ import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskData;

 /**
- * An ingest service on a file level
- * Performs indexing of allocated and Solr supported files,
- * string extraction and indexing of unallocated and not Solr supported files
- * Index commit is done periodically (determined by user set ingest update interval)
- * Runs a periodic keyword / regular expression search on currently configured lists for ingest
- * and writes results to blackboard
+ * An ingest service on a file level Performs indexing of allocated and Solr
+ * supported files, string extraction and indexing of unallocated and not Solr
+ * supported files Index commit is done periodically (determined by user set
+ * ingest update interval) Runs a periodic keyword / regular expression search
+ * on currently configured lists for ingest and writes results to blackboard
 * Reports interesting events to Inbox and to viewers
 *
 * Registered as a service in layer.xml
@@ -92,19 +92,20 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     private volatile int messageID = 0;
     private boolean processedFiles;
     private volatile boolean finalSearcherDone = true;
-    private final String hashDBServiceName = "Hash Lookup";
+    private final String hashDBServiceName = "Hash Lookup"; //NOTE this needs to match the HashDB service getName()
     private SleuthkitCase caseHandle = null;
     private boolean skipKnown = true;
     boolean initialized = false;

     private enum IngestStatus {
-        INGESTED, EXTRACTED_INGESTED, SKIPPED,
+        INGESTED, EXTRACTED_INGESTED, SKIPPED, INGESTED_META
     };
     private Map<Long, IngestStatus> ingestStatus;

     /**
     * Returns singleton instance of the service, creates one if needed
+     *
     * @return instance of the service
     */
     public static synchronized KeywordSearchIngestService getDefault() {
@@ -115,10 +116,12 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * Starts processing of every file provided by IngestManager.
-     * Checks if it is time to commit and run search
+     * Starts processing of every file provided by IngestManager. Checks if it
+     * is time to commit and run search
+     *
     * @param abstractFile file/unallocated file/directory to process
-     * @return ProcessResult.OK in most cases and ERROR only if error in the pipeline, otherwise does not advice to stop the pipeline
+     * @return ProcessResult.OK in most cases and ERROR only if error in the
+     * pipeline, otherwise does not advice to stop the pipeline
     */
     @Override
     public ProcessResult process(AbstractFile abstractFile) {
@@ -133,8 +136,12 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
         IngestServiceAbstractFile.ProcessResult hashDBResult = managerProxy.getAbstractFileServiceResult(hashDBServiceName);
         //logger.log(Level.INFO, "hashdb result: " + hashDBResult + "file: " + AbstractFile.getName());
         if (hashDBResult == IngestServiceAbstractFile.ProcessResult.COND_STOP && skipKnown) {
+            //index meta-data only
+            indexer.indexFile(abstractFile, false);
             return ProcessResult.OK;
         } else if (hashDBResult == IngestServiceAbstractFile.ProcessResult.ERROR) {
+            //index meta-data only
+            indexer.indexFile(abstractFile, false);
             //notify depending service that keyword search (would) encountered error for this file
             return ProcessResult.ERROR;
         }
@@ -145,7 +152,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
         checkRunCommitSearch();

-        indexer.indexFile(abstractFile);
+        //index the file and content (if the content is supported)
+        indexer.indexFile(abstractFile, true);
         return ProcessResult.OK;
     }
@@ -196,8 +204,7 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * Handle stop event (ingest interrupted)
-     * Cleanup resources, threads, timers
+     * Handle stop event (ingest interrupted) Cleanup resources, threads, timers
     */
     @Override
     public void stop() {
@@ -234,8 +241,9 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * Initializes the service for new ingest run
-     * Sets up threads, timers, retrieves settings, keyword lists to run on
+     * Initializes the service for new ingest run Sets up threads, timers,
+     * retrieves settings, keyword lists to run on
+     *
     * @param managerProxy
     */
     @Override
@@ -320,8 +328,10 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * The services maintains background threads, return true if background threads are running
-     * or there are pending tasks to be run in the future, such as the final search post-ingest completion
+     * The services maintains background threads, return true if background
+     * threads are running or there are pending tasks to be run in the future,
+     * such as the final search post-ingest completion
+     *
     * @return
     */
     @Override
@@ -353,6 +363,7 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     */
     private void postIndexSummary() {
         int indexed = 0;
+        int indexed_meta = 0;
         int indexed_extr = 0;
         int skipped = 0;
         for (IngestStatus s : ingestStatus.values()) {
@@ -360,6 +371,9 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
             case INGESTED:
                 ++indexed;
                 break;
+            case INGESTED_META:
+                ++indexed_meta;
+                break;
             case EXTRACTED_INGESTED:
                 ++indexed_extr;
                 break;
@@ -373,6 +387,7 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
         StringBuilder msg = new StringBuilder();
         msg.append("Indexed files: ").append(indexed).append("<br />Indexed strings: ").append(indexed_extr);
+        msg.append("<br />Indexed meta-data only: ").append(indexed_meta).append("<br />");
         msg.append("<br />Skipped files: ").append(skipped).append("<br />");
         String indexStats = msg.toString();
         logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats);
@@ -423,8 +438,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * Check if time to commit, if so, run commit.
-     * Then run search if search timer is also set.
+     * Check if time to commit, if so, run commit. Then run search if search
+     * timer is also set.
     */
     void checkRunCommitSearch() {
         if (commitIndex) {
@@ -446,8 +461,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * CommitTimerAction to run by commitTimer
-     * Sets a flag to indicate we are ready for commit
+     * CommitTimerAction to run by commitTimer Sets a flag to indicate we are
+     * ready for commit
     */
     private class CommitTimerAction implements ActionListener {
@@ -461,8 +476,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * SearchTimerAction to run by searchTimer
-     * Sets a flag to indicate we are ready to search
+     * SearchTimerAction to run by searchTimer Sets a flag to indicate we are
+     * ready to search
     */
     private class SearchTimerAction implements ActionListener {
@@ -495,42 +510,70 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
         return indexed;
     }

-    private void indexFile(AbstractFile aFile) {
+    private void indexFile(AbstractFile aFile, boolean indexContent) {
         //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName());
-        boolean ingestibleFile = Ingester.isIngestible(aFile);

-        final long size = aFile.getSize();
-        //limit size of entire file, do not limit strings
-        if (size == 0 || (ingestibleFile && size > MAX_INDEX_SIZE)) {
-            ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
+        FsContent fsContent = null;
+        //check if alloc fs file or dir
+        TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType();
+        if (aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) {
+            fsContent = (FsContent) aFile;
+        }
+
+        //if alloc fs file and not index content, or a dir, index meta data only
+        if (fsContent != null
+                && (indexContent == false || fsContent.isDir())) {
+            try {
+                ingester.ingest(fsContent, false); //meta-data only
+                ingestStatus.put(aFile.getId(), IngestStatus.INGESTED_META);
+            } catch (IngesterException ex) {
+                ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
+                logger.log(Level.WARNING, "Unable to index meta-data for fsContent: " + fsContent.getId(), ex);
+            }
             return;
         }

-        if (ingestibleFile == true) {
-            //we know it's an allocated file or dir (FsContent)
-            FsContent fileDir = (FsContent) aFile;
+        boolean ingestibleFile = Ingester.isIngestible(aFile);
+
+        final long size = aFile.getSize();
+        //if fs file, limit size of entire file, do not limit strings
+        if (fsContent != null && (size == 0 || (ingestibleFile && size > MAX_INDEX_SIZE))) {
+            //if fs file, index meta only, otherwise if unalloc, skip
+            try {
+                ingester.ingest(fsContent, false); //meta-data only
+                ingestStatus.put(aFile.getId(), IngestStatus.INGESTED_META);
+            } catch (IngesterException ex) {
+                ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
+                logger.log(Level.WARNING, "Unable to index meta-data for fsContent: " + fsContent.getId(), ex);
+            }
+            return;
+        }
+
+        if (fsContent != null && ingestibleFile == true) {
+            //we know it's an allocated fs file (FsContent) with supported content
             try {
                 //logger.log(Level.INFO, "indexing: " + fsContent.getName());
-                ingester.ingest(fileDir);
-                ingestStatus.put(fileDir.getId(), IngestStatus.INGESTED);
+                ingester.ingest(fsContent, true);
+                ingestStatus.put(fsContent.getId(), IngestStatus.INGESTED);
             } catch (IngesterException e) {
-                ingestStatus.put(fileDir.getId(), IngestStatus.SKIPPED);
-                //try to extract strings if not a dir
-                if (fileDir.isFile() == true) {
-                    processNonIngestible(fileDir);
+                ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED);
+                //try to extract strings, if a file
+                if (fsContent.isFile() == true) {
+                    processNonIngestible(fsContent);
                 }
             } catch (Exception e) {
-                ingestStatus.put(fileDir.getId(), IngestStatus.SKIPPED);
-                //try to extract strings if not a dir
-                if (fileDir.isFile() == true) {
-                    processNonIngestible(fileDir);
+                ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED);
+                //try to extract strings if a file
+                if (fsContent.isFile() == true) {
+                    processNonIngestible(fsContent);
                 }
             }
         } else {
-            //unallocated or unsupported type by Solr
+            //unallocated file or unsupported content type by Solr
             processNonIngestible(aFile);
         }
     }
@@ -547,10 +590,10 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     }

     /**
-     * Searcher responsible for searching the current index and writing results to blackboard
-     * and the inbox. Also, posts results to listeners as Ingest data events.
-     * Searches entire index, and keeps track of only new results to report and save.
-     * Runs as a background thread.
+     * Searcher responsible for searching the current index and writing results
+     * to blackboard and the inbox. Also, posts results to listeners as Ingest
+     * data events. Searches entire index, and keeps track of only new results
+     * to report and save. Runs as a background thread.
     */
     private class Searcher extends SwingWorker<Object, Void> {
@@ -574,7 +617,6 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
         final String displayName = "Keyword Search" + (finalRun ? " - Finalizing" : "");
         progress = ProgressHandleFactory.createHandle(displayName + (" (Pending)"), new Cancellable() {
             @Override
             public boolean cancel() {
                 logger.log(Level.INFO, "Cancelling the searcher by user.");
@@ -833,14 +875,14 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     //without relying on done() method that is not guaranteed to run after background thread completes
     //NEED to call this method always right before doInBackground() returns
     /**
-     * Performs the cleanup that needs to be done right AFTER doInBackground() returns
-     * without relying on done() method that is not guaranteed to run after background thread completes
-     * REQUIRED to call this method always right before doInBackground() returns
+     * Performs the cleanup that needs to be done right AFTER
+     * doInBackground() returns without relying on done() method that is not
+     * guaranteed to run after background thread completes REQUIRED to call
+     * this method always right before doInBackground() returns
     */
     private void finalizeSearcher() {
         logger.log(Level.INFO, "Searcher finalizing");
         SwingUtilities.invokeLater(new Runnable() {
             @Override
             public void run() {
                 progress.finish();
@@ -890,7 +932,9 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi
     /**
     * Set the skip known files setting on the service
-     * @param skip true if skip, otherwise, will process known files as well, as reported by HashDB service
+     *
+     * @param skip true if skip, otherwise, will process known files as well, as
+     * reported by HashDB service
     */
     void setSkipKnown(boolean skip) {
         this.skipKnown = skip;

View File

@@ -124,7 +124,7 @@ public class Report {
         tempdbconnect.executeStmt("DROP TABLE IF EXISTS report_hashset;");
         tempdbconnect.executeStmt("DROP TABLE IF EXISTS report_hashname;");
         tempdbconnect.executeStmt("DROP TABLE IF EXISTS report_hash;");
-        String temp1 = "CREATE TABLE report_hashset AS SELECT value_text as hashset,blackboard_attributes.attribute_type_id, blackboard_attributes.artifact_id FROM blackboard_attributes WHERE attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HASHSET_NAME.getTypeID() + ";";
+        String temp1 = "CREATE TABLE report_hashset AS SELECT value_text as hashset,blackboard_attributes.attribute_type_id, blackboard_attributes.artifact_id FROM blackboard_attributes WHERE attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + ";";
         String temp5 = "CREATE TABLE report_hashname AS SELECT name, size, report_hashset.artifact_id from tsk_files,blackboard_artifacts, report_hashset WHERE blackboard_artifacts.artifact_id = report_hashset.artifact_id AND blackboard_artifacts.obj_id = tsk_files.obj_id;";
         String temp6 = "CREATE TABLE report_hash AS SELECT hashset,size,name from report_hashset INNER JOIN report_hashname ON report_hashset.artifact_id=report_hashname.artifact_id;";
         tempdbconnect.executeStmt(temp1);
@@ -215,7 +215,7 @@ public class Report {
             table.append("<table><thead><tr><th>").append("Folder").append("</th><th>From</th><th>To</th><th>Subject</th><th>Date/Time</th><th>Content</th><th>CC</th><th>BCC</th><th>Path</th></tr><tbody>");
         }
         table.append("<tr><td>").append(uniqueresults.getString("name")).append("</td>");
-        table.append("<td>").append(uniqueresults.getString("receiver")).append("</td>").append("<td>").append(uniqueresults.getString("author")).append("<br />(").append(uniqueresults.getString("subject")).append(")").append("</td>");
+        table.append("<td>").append(uniqueresults.getString("receiver")).append("</td>").append("<td>").append(uniqueresults.getString("author")).append("</td><td>").append(uniqueresults.getString("subject")).append("</td>");
         SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
         String value = sdf.format(new java.util.Date(uniqueresults.getLong("date") * 1000));
         table.append("<td>").append(value).append("</td>");

View File

@@ -246,7 +246,7 @@ public class ReportHTML implements ReportModule {
         StringBuilder nodeKeyword = new StringBuilder("<h3><a name=\"keyword\">Keyword Search Hits (").append(countKeyword).append(")</h3>");
         StringBuilder nodeHash = new StringBuilder("<h3><a name=\"hash\">Hashset Hit (").append(countHash).append(")</h3>");
         StringBuilder nodeDevice = new StringBuilder("<h3><a name=\"device\">Attached Devices (").append(countHash).append(")</h3>").append(tableHeader).append("<th>Name</th><th>Serial #</th><th>Time</th></tr></thead><tbody>");
-        StringBuilder nodeEmail = new StringBuilder("<h3><a name=\"email\">Email Messages (").append(countHash).append(")</h3>");
+        StringBuilder nodeEmail = new StringBuilder("<h3><a name=\"email\">Email Messages (").append(countEmail).append(")</h3>");
         int alt = 0;
         String altRow = "";

View File

@@ -69,7 +69,7 @@
 <!-- This target will create a custom ZIP file for us. It first uses the general
 ZIP target and then opens it up and adds in any files that we want. This is where we customize the
 version number. -->
-<target name="build-zip" depends="suite.build-zip">
+<target name="build-zip" depends="suite.build-zip,findJRE">
     <property name="nbdist.dir" value="dist"/>
     <property name="release.dir" value="${nbdist.dir}/${app.name}"/>
@@ -84,11 +84,17 @@
     <copy file="${basedir}/LICENSE-2.0.txt" tofile="${zip-tmp}/${app.name}/LICENSE-2.0.txt"/>
     <copy file="${basedir}/NEWS.txt" tofile="${zip-tmp}/${app.name}/NEWS.txt"/>
     <copy file="${basedir}/KNOWN_ISSUES.txt" tofile="${zip-tmp}/${app.name}/KNOWN_ISSUES.txt"/>
+    <unzip src="${thirdparty.dir}/gstreamer/${os.family}/i386/0.10.7/gstreamer.zip" dest="${zip-tmp}/${app.name}/gstreamer"/>
+    <copy todir="${zip-tmp}/${app.name}/jre6">
+        <fileset dir="${env.JRE_HOME}"/>
+    </copy>
+    <copy file="${basedir}/branding_${app.name}/icon.ico" tofile="${zip-tmp}/${app.name}/icon.ico" overwrite="true"/>
     <antcall target="copyExternalLibsToZip"></antcall>
     <propertyfile
         file="${zip-tmp}/${app.name}/etc/${app.name}.conf">
         <entry key="default_options" value="&quot;--branding ${app.name} -J-Xms24m -J-Xmx1024m&quot;" />
+        <entry key="jdkhome" value="&quot;jre6&quot;" />
     </propertyfile>

     <!-- step (4) zip again, but with the version numbers in the dir -->
@@ -199,22 +205,8 @@
 </target>

-<target name="build-installer-dir" depends="findJRE,build-zip" >
+<target name="build-installer-dir" depends="build-zip" >
     <unzip src="${nbdist.dir}/${app.name}-${app.version}.zip" dest="${nbdist.dir}/${app.name}-installer"/>
-    <antcall target="update-jdkhome" />
-    <delete file="${nbdist.dir}/${app.name}-${app.version}.zip"/>
-    <unzip src="${thirdparty.dir}/gstreamer/${os.family}/i386/0.10.7/gstreamer.zip" dest="${nbdist.dir}/${app.name}-installer/gstreamer"/>
-    <copy todir="${nbdist.dir}/${app.name}-installer/jre6">
-        <fileset dir="${env.JRE_HOME}"/>
-    </copy>
-    <copy file="${basedir}/branding_${app.name}/icon.ico" tofile="${nbdist.dir}/${app.name}-installer/icon.ico" overwrite="true"/>
-</target>
-
-<target name="update-jdkhome" depends="getProps">
-    <propertyfile
-        file="${nbdist.dir}/${app.name}-installer/etc/${app.name}.conf">
-        <entry key="jdkhome" value="&quot;jre6&quot;" />
-    </propertyfile>
 </target>

 <target name="build-installer" depends="build-installer-dir">

View File

@@ -1,18 +1,224 @@
/*! \page design_page General Design

\section design_overview Overview
This section outlines the Autopsy design from the perspective of the typical analysis workflow.
A typical Autopsy workflow consists of the following steps:

- Wizards are used to create the case and add images (org.sleuthkit.autopsy.casemodule),
- The TSK database is created,
- Ingest modules are run (org.sleuthkit.autopsy.ingest),
- Ingest modules post results to the blackboard and the ingest inbox,
- The directory tree displays the blackboard contents,
- Data is encapsulated into nodes and passed to table and content viewers,
- Reports can be generated.
\subsection design_overview_sub1 Creating a case

The first step in the Autopsy workflow is creating a case.
The user is guided by the case creation wizard (invoked by org.sleuthkit.autopsy.casemodule.NewCaseWizardAction) to enter the case name, base directory and optional case information.
The base directory is the directory where all files associated with the case are stored.
The directory is self-contained (besides the referenced image files, which are stored separately) and can later be moved by the user to another location or another machine (along with the linked image files).

The case directory contains:
- a newly created, empty case SQLite TSK database, autopsy.db,
- a case XML configuration file, named after the case name with the .aut extension,
- a directory structure for temporary files, case log files, cache files, and module-specific files.
An example of a module-specific directory is the keywordsearch directory, created by the Keyword Search module.

After the case is created, the currentCase singleton member variable in the Case class is updated.
It provides access to higher-level case information stored in the case XML file.
The org.sleuthkit.autopsy.casemodule.Case class also contains support for case events; events are sent to registered listeners when a new case is created, opened, changed or closed.
When a case is changed or created, the org.sleuthkit.datamodel.SleuthkitCase handle to the TSK database is also updated.
SleuthkitCase contains a handle to the org.sleuthkit.datamodel.SleuthkitJNI object, through which the native sleuthkit API can be accessed.
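A component can subscribe to these case events roughly as follows (a minimal sketch; the property name string is illustrative - consult the event name constants defined in the Case class):

\code
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import org.sleuthkit.autopsy.casemodule.Case;

public class CaseChangeListener implements PropertyChangeListener {

    public CaseChangeListener() {
        // Case notifies registered listeners when a case is created, opened, changed or closed
        Case.addPropertyChangeListener(this);
    }

    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if ("currentCase".equals(evt.getPropertyName())) { // illustrative event name
            // react to the case change, e.g. refresh viewers
        }
    }
}
\endcode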
\subsection design_overview_sub2 Adding an image

After the case is created, the user is guided to add an image to the case using the wizard invoked by org.sleuthkit.autopsy.casemodule.AddImageAction.
org.sleuthkit.autopsy.casemodule.AddImageWizardIterator instantiates and manages the wizard panels (there are currently 4 of them).
The user enters image information in the first panel, org.sleuthkit.autopsy.casemodule.AddImageWizardPanel1 (image path, image timezone and additional options).
In the subsequent panel, org.sleuthkit.autopsy.casemodule.AddImageWizardPanel2, a background worker thread is spawned in AddImgTask.
Work is delegated to org.sleuthkit.datamodel.AddImageProcess, which calls native sleuthkit methods via SleuthkitJNI to initialize, run and commit the new image.
The entire process is enclosed within a database transaction and the transaction is not committed until the user finalizes the process in org.sleuthkit.autopsy.casemodule.AddImageWizardPanel3.

The user can also interrupt the ongoing add image process, which results in a stop call in sleuthkit. The call sets a special stop flag. The flag is periodically checked by sleuthkit code and, if set, results in breaking out of any current processing methods and loops and returning from sleuthkit.
The worker thread in Autopsy will terminate and revert will be called to back out of the current transaction.
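The add image flow can be sketched as follows (an illustrative, simplified sketch - the factory method signature has varied across versions, so consult org.sleuthkit.datamodel.AddImageProcess for the actual API):

\code
// sketch, assuming a SleuthkitCase handle and user-entered settings
AddImageProcess process = sleuthkitCase.makeAddImageProcess(timeZone);
try {
    process.run(new String[]{imagePath}); // runs inside a database transaction
    process.commit();                     // commit only after the user confirms in Panel3
} catch (TskCoreException ex) {
    // a user interrupt instead calls process.stop(), setting the stop flag checked by sleuthkit
    process.revert();                     // critical error: back out of the transaction
}
\endcode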
The actual work in the add image process is done in the native sleuthkit library.
The library reads the image and populates the TSK SQLite database with the image meta-data.
Rows are inserted into the following tables:
- tsk_objects (all content objects are given their unique object IDs and are associated with parents),
- tsk_file_layout (for file block information, such as for "special" files representing unallocated data),
- tsk_image_info, tsk_image_names (to store image info, such as local image paths, block size and time zone),
- tsk_vs_info (to store volume system information),
- tsk_vs_parts (to store volume information),
- tsk_fs_info (to store file system information),
- tsk_files (to store all files and directories discovered and their attributes).

After the image has been processed successfully and the user has confirmed it, the transaction is committed to the database.

Errors from processing the image in sleuthkit are propagated using the org.sleuthkit.datamodel.TskCoreException and org.sleuthkit.datamodel.TskDataException java exceptions.
The errors are logged and can be reviewed by the user from the wizard.
org.sleuthkit.datamodel.TskCoreException is handled by the wizard as a critical, unrecoverable error condition with TSK core, resulting in the interruption of the add image process.
org.sleuthkit.datamodel.TskDataException, pertaining to an error associated with the data itself (such as an invalid volume offset), is treated as a warning - the process still continues because there is likely image data that can still be read.
\subsection design_overview_sub3 Concurrency

Autopsy is a highly multi-threaded application; besides the threads associated with the GUI, event dispatching and the Netbeans RCP, the application uses threads to support concurrent user-driven processes.
For instance, multiple image ingest services can be run at the same time. In addition, the user can add another image to the database while ingest is running on previously added images.

During the add image process, a database lock is acquired using org.sleuthkit.datamodel.SleuthkitCase.dbWriteLock() to ensure exclusive access to the database resource.
Once the lock is acquired by the add image process, other Autopsy threads trying to acquire the lock (such as ingest modules) will block for the duration of the add image process.
The database lock is implemented with the SQLite database in mind, which does not support concurrent writes. The database lock is released with org.sleuthkit.datamodel.SleuthkitCase.dbWriteUnlock() when the add image process has ended.
The database lock is used for all database access methods in org.sleuthkit.datamodel.SleuthkitCase.
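In code, the locking pattern described above looks roughly like this (a sketch built from the dbWriteLock()/dbWriteUnlock() methods referenced above):

\code
SleuthkitCase.dbWriteLock();    // block all other database writers (e.g. ingest modules)
try {
    // long-running, exclusive database work, such as the add image process
} finally {
    SleuthkitCase.dbWriteUnlock();
}
\endcode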
\subsection design_overview_sub4 Running ingest modules

The user has the option to run ingest modules after the image has been added using the wizard, and ingest modules can optionally be run or re-run at any time.

Ingest modules (also referred to as ingest services) are designed as plugins that are separate from the Autopsy core.
Ingest modules can be added to an existing Autopsy installation as jar files and they will be automatically recognized the next time Autopsy starts.

Every module generally has its own specific role. The two main use cases for ingest modules are:
- to extract information from the image and write results to the blackboard,
- to analyze data already in the blackboard and add more information to it.

There may also be special-purpose ingest modules that run early in the ingest pipeline. Results posted by such modules can be useful to subsequent modules.
One example of such a module is the Hash DB module, which determines which files are known; known files are generally treated differently.
For instance, processing of known files can be skipped by subsequent modules in the pipeline (if chosen so), for performance reasons.

Autopsy provides an ingest module framework in the org.sleuthkit.autopsy.ingest package, located in a separate module.
The framework provides the interfaces every ingest module needs to implement:
- org.sleuthkit.autopsy.ingest.IngestServiceImage (for modules that are interested in the image as a whole, or in picking only specific data of interest from the image),
- org.sleuthkit.autopsy.ingest.IngestServiceAbstractFile (for modules that need to process every file).
The interfaces define methods to initialize, process passed in data, configure the ingest service, query the service state and finalize the service.

The framework also contains classes:
- org.sleuthkit.autopsy.ingest.IngestManager, the ingest manager, responsible for discovery of ingest modules, enqueuing work to the modules, starting and stopping the ingest pipeline, and propagating messages sent from the ingest modules to other Autopsy components,
- org.sleuthkit.autopsy.ingest.IngestManagerProxy, a facility used by the modules to communicate with the manager,
- additional classes to support threading, sending messages, ingest monitoring, ingest cancellation, progress bars,
- a user interface component (Ingest Inbox) used to display interesting messages posted by ingest modules to the user.

To implement an ingest module it is required to implement one of the interfaces (for file or image ingest) and to have the module register itself using the Netbeans Lookup infrastructure in the layer.xml file; an outline is sketched below.
Please refer to ingest.dox, the org.sleuthkit.autopsy.ingest package API and the org.sleuthkit.autopsy.ingest.example examples for more details on implementing custom ingest modules.
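An outline of a file-level ingest module follows (a sketch only - the method set is abridged and the exact signatures should be taken from the org.sleuthkit.autopsy.ingest interfaces):

\code
public class SampleFileIngestService implements IngestServiceAbstractFile {

    private IngestManagerProxy managerProxy;

    @Override
    public void init(IngestManagerProxy managerProxy) {
        this.managerProxy = managerProxy; // set up threads, timers, settings, keyword lists
    }

    @Override
    public ProcessResult process(AbstractFile abstractFile) {
        // extract information from the file and write results to the blackboard
        return ProcessResult.OK;
    }

    @Override
    public void complete() {
        // ingest finished: commit results, post a summary message to the Ingest Inbox
    }

    @Override
    public void stop() {
        // ingest interrupted: clean up resources, threads, timers
    }

    @Override
    public String getName() {
        return "Sample File Ingest";
    }
}
\endcode

The module is then registered in its layer.xml file so that the Lookup infrastructure can discover it.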
Ingest modules typically require configuration before they are executed.
The configuration methods are defined in the ingest module interfaces.
Module configuration is decentralized and module-specific; every module maintains its
own configuration state and is responsible for implementing its own JPanels to render
and present the configuration to the user. Method hooks defined in the ingest service interface hint to the module when its configuration should be preserved.
Ingest modules run in background threads. There is a single background thread for file-level ingest modules, within which every file ingest module runs in series for every file.
Image ingest modules each run in their own thread and thus can run in parallel (TODO: we will change this in the future for performance reasons, and support image ingest module dependencies).
Every ingest thread is presented with a progress bar and can be cancelled by the user, or by the framework in case of a critical event (such as Autopsy terminating, or a system error).
An ingest module can also implement its own internal threads for any special-purpose processing that can occur in parallel.
However, the module is then responsible for creating, managing and tearing down its internal threads.
An example of a module that maintains its own threads is the KeywordSearch module.
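As a sketch of this pattern, a module might own a small executor whose lifecycle it manages itself; the init()/stop() hook names are assumptions carried over from the skeleton above:

\code
// Hypothetical module-owned worker; the module alone is responsible for
// creating, managing and tearing down its internal threads.
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class WorkerOwningService {

    private ExecutorService worker;

    public void init() {
        worker = Executors.newSingleThreadExecutor(); // created by the module
    }

    public void submitWork(Runnable task) {
        worker.submit(task); // special-purpose processing runs in parallel
    }

    public void stop() {
        worker.shutdownNow(); // torn down promptly on cancellation
    }
}
\endcode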
\subsection design_overview_sub5 Ingest modules posting results
Ingest services, when running, provide real-time updates to the user
by periodically posting data results and messages to registered components.
The timing as to when a service posts results data is module-implementation-specific.
In a simple case, a service may post new data as soon as the data is available
- this is the case for simple services that take a relatively short time to execute and whose new data is expected
to arrive on the order of seconds.
Another possibility is to post data at fixed time intervals (e.g. for a service that takes minutes to produce results,
or for a service that maintains internal threads to perform work).
There exists a global update setting that specifies the maximum time interval for a service to post data.
The user may adjust the interval for more frequent, real-time updates. Services that post data at periodic intervals should post their data according to this setting.
The setting is retrieved by the module using the getUpdateFrequency() method in the org.sleuthkit.autopsy.ingest.IngestManagerProxy class.
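A periodically posting service could gate its posts on this setting as in the following sketch, assumed to run inside a service where managerProxy is available; the unit of the returned value (assumed to be minutes) and the postCurrentResults() helper are assumptions:

\code
// Hypothetical gating of data posts on the global update setting.
// The minutes unit and postCurrentResults() helper are assumptions.
long intervalMs = managerProxy.getUpdateFrequency() * 60L * 1000L;
long now = System.currentTimeMillis();
if (now - lastPostTime >= intervalMs) {
    postCurrentResults(); // hypothetical helper that fires the data event
    lastPostTime = now;
}
\endcode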
Data event registration and posting.
When an ingest service produces data, it then writes it to the blackboard (as blackboard artifacts and associated attributes).
The service should periodically notify listeners of newly available data by invoking the fireServiceDataEvent() method in the org.sleuthkit.autopsy.ingest.IngestManagerProxy class.
The method accepts org.sleuthkit.autopsy.ingest.ServiceDataEvent parameter.
The parameter wraps a collection of blackboard artifacts and their associated attributes that are to be reported as the new data to listeners.
Passing the data as part of the event reduces the memory footprint and decreases the number of garbage collections
of the blackboard artifact and attribute objects (the objects are expected to be reused by the data event listeners).
The service name and the artifact type for the collection of artifacts are also passed in as part of the event.
The artifacts passed in a single event should be of the same type, which is enforced by the org.sleuthkit.autopsy.ingest.ServiceDataEvent constructor.
If a service has new data, but the service implementation does not include new artifact tracking, it is possible to pass only the service name and artifact type in the event.
The event listener may choose to perform a blackboard query for the artifact type to query all data of that type currently stored in the blackboard, including the new data.
Service event listeners need to register themselves with the org.sleuthkit.autopsy.ingest.IngestManager directly, using the static addPropertyChangeListener() method.
At the end of the ingest, org.sleuthkit.autopsy.ingest.IngestManager itself will notify all listeners of new data being available in the blackboard.
This ensures the listeners receive a new data notification, in case some of the modules fail to report availability of new data.
However, ingest module developers are encouraged to generate new data events to provide real-time feedback to the user.
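Putting the pieces together, a service might post new results as in the following sketch, assumed to run inside a service where managerProxy is available; the ServiceDataEvent constructor arguments and the artifact-producing helper are assumptions based on the description above:

\code
// Hypothetical posting of new blackboard results as a data event. The
// constructor arguments (service name, single artifact type, artifact
// collection) are assumptions based on the text above.
Collection<BlackboardArtifact> newArtifacts = writeResultsToBlackboard(); // hypothetical helper
managerProxy.fireServiceDataEvent(new ServiceDataEvent(
        "Sample File Ingest Service",
        BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT, // one type per event
        newArtifacts));
\endcode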
Ingest message registration and posting.
In addition to data events, ingest services should send ingest messages about interesting events.
Examples of such events include service status (started, stopped) or information about new data.
The messages include the source service, message subject, message details, a unique message id (in the context of the originating service) and a uniqueness attribute, used to group similar messages together and to determine the overall importance (priority) of the message.
A message group with a higher number of aggregated messages sharing the same uniqueness attribute is considered lower priority.
Ingest messages have different types: there are info messages, warning messages, error messages and data messages.
The data messages contain encapsulated blackboard artifacts and attributes. The passed-in data is used by the ingest inbox GUI widget to navigate to the artifact view in the directory tree, if requested by the user.
The ingest message API is defined in the org.sleuthkit.autopsy.ingest.IngestMessage class. The class also contains factory methods to create new messages.
Messages are posted using the org.sleuthkit.autopsy.ingest.IngestManagerProxy postMessage() method, which accepts a message created using one of the factory methods.
The recipient of the ingest messages is the Ingest Inbox viewer widget component, part of the org.sleuthkit.autopsy.ingest package.
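For instance, a service might post an informational message as in this sketch; the factory method name, its argument order and the messageId counter are assumptions based on the description above:

\code
// Hypothetical posting of an informational ingest message; the factory
// method name and argument order are assumptions based on the text.
IngestMessage message = IngestMessage.createMessage(
        ++messageId,                    // hypothetical per-service id counter
        IngestMessage.MessageType.INFO, // info / warning / error / data
        this,                           // the originating service
        "Processed 1000 files so far"); // message subject
managerProxy.postMessage(message);
\endcode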
\subsection design_overview_sub6 Result viewers (directory tree, table viewers, content viewers)
The directory tree result viewer (in the left-hand panel of the Autopsy UI) is the core viewer for results saved during the ingest process.
The component is registered by default as an ingest message listener with the ingest manager.
When Autopsy starts, the viewer queries the blackboard data and populates the UI.
During ingest, the viewer responds to data events by refreshing the data nodes corresponding to the artifact type in the data event.
When ingest is completed, the viewer responds to the final ingest data event generated by the ingest manager,
and performs a final refresh of all data nodes.
Data is encapsulated in nodes (org.openide.nodes.Node) before it is displayed in the UI.
A node is an abstraction for a displayable data unit.
The nodes contain property sheets to store data and are organized in a parent-child hierarchy.
The hierarchy is used to visually represent the data and to trigger child view update whenever the parent node is selected by the user.
Node child factories are invoked by the NetBeans framework at the time of parent node selection to create or refresh the child node view.
Once a node is selected, its property sheet is rendered in the default table result viewer in the top-right part of the Autopsy UI.
Nodes can also be registered with content viewer (bottom-right part of the Autopsy UI).
Nodes use the node lookup infrastructure org.openide.util.Lookup to register their content viewer capabilities.
When a new node is selected, org.sleuthkit.autopsy.corecomponents.DataContentTopComponent queries registered data content viewers to determine support for the given node content.
The specific content viewers query the node lookup to determine whether they can handle the node's content, and return a number ranking the degree of the viewer's support for the node type.
Based on return values of isSupported() and isPreferred() methods, the org.sleuthkit.autopsy.corecomponents.DataContentTopComponent enables or disables content viewers and selects a default active viewer for the node type.
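A content viewer's support hooks might look like the following sketch; the isSupported()/isPreferred() names come from the text above, while the exact signatures are assumptions:

\code
// Hypothetical support/preference hooks of a content viewer. Method names
// are taken from the text above; exact signatures are assumptions.
import org.openide.nodes.Node;
import org.sleuthkit.datamodel.Content;

public class SampleContentViewer {

    public boolean isSupported(Node node) {
        // support any node that exposes Content through its lookup
        return node != null && node.getLookup().lookup(Content.class) != null;
    }

    public int isPreferred(Node node, boolean isSupported) {
        return isSupported ? 1 : 0; // low ranking: act as a fallback viewer
    }
}
\endcode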
\subsection design_overview_sub7 Report generation
After ingest has run, the user can generate reports.
There are several types of reports implemented as submodules and shipped with the Autopsy core: generic HTML, XML and Excel reports.
Each reporting submodule implements the org.sleuthkit.autopsy.report.ReportModule interface and registers itself in layer.xml.
A reporting submodule typically interacts with three components:
- org.sleuthkit.autopsy.report.ReportConfiguration - to read current reporting configuration set by the user,
- Blackboard API in org.sleuthkit.datamodel.SleuthkitCase class - to traverse and read blackboard artifacts and attributes,
- an API (possibly an external/third-party API) to convert blackboard artifact data structures to the desired reporting format.
The Autopsy reporting module resides in the org.sleuthkit.autopsy.report package.
Please refer to report.dox and the org.sleuthkit.autopsy.report package API documentation for more details on how to implement a custom reporting submodule.
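The following is a sketch of a reporting submodule; the generateReport() method name, its ReportConfiguration parameter and the return value are assumptions based on the description above:

\code
// Hypothetical skeleton of a reporting submodule. The generateReport()
// method name, parameter and return value are assumptions.
import org.sleuthkit.autopsy.report.ReportConfiguration;
import org.sleuthkit.autopsy.report.ReportModule;

public class SampleReportModule implements ReportModule {

    @Override
    public String generateReport(ReportConfiguration config) {
        // 1. read the current reporting configuration set by the user
        // 2. traverse blackboard artifacts and attributes via the
        //    SleuthkitCase blackboard API
        // 3. convert the artifacts to the target format and write the file
        return "report.html"; // hypothetical path to the generated report
    }

    @Override
    public String getName() {
        return "Sample Report Module";
    }
}
\endcode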
To summarize, the various pieces work behind the scenes as follows (in order of analysis):
- Wizards are used to create case and images (all from org.sleuthkit.autopsy.casemodule)
- DB is created
- Ingest modules are run (org.sleuthkit.autopsy.ingest.IngestManager)
- Ingest modules post results to the blackboard and inbox
- Tree displays blackboard contents
- Data is encapsulated into nodes and passed to table and content viewers
- Reports can be generated
*/

View File

@ -85,9 +85,11 @@ public class ThunderbirdMboxFileIngestService implements IngestServiceAbstractFile
         }
         try {
-            byte[] t = new byte[(int) 128];
-            int byteRead = fsContent.read(t, 0, 128);
-            isMbox = mbox.isValidMimeTypeMbox(t);
+            byte[] t = new byte[64];
+            if(fsContent.getSize() > 64) {
+                int byteRead = fsContent.read(t, 0, 64);
+                isMbox = mbox.isValidMimeTypeMbox(t);
+            }
         } catch (TskException ex) {
             Logger.getLogger(ThunderbirdMboxFileIngestService.class.getName()).log(Level.WARNING, null, ex);
         }