diff --git a/Case/src/org/sleuthkit/autopsy/casemodule/AddImageErrorsDialog.java b/Case/src/org/sleuthkit/autopsy/casemodule/AddImageErrorsDialog.java index 8f24771481..cf6d30f504 100644 --- a/Case/src/org/sleuthkit/autopsy/casemodule/AddImageErrorsDialog.java +++ b/Case/src/org/sleuthkit/autopsy/casemodule/AddImageErrorsDialog.java @@ -111,47 +111,7 @@ public class AddImageErrorsDialog extends javax.swing.JDialog { errorsText.setText(errors); } - /** - * @param args the command line arguments - */ - public static void main(String args[]) { - /* Set the Nimbus look and feel */ - // - /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel. - * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html - */ - try { - for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) { - if ("Nimbus".equals(info.getName())) { - javax.swing.UIManager.setLookAndFeel(info.getClassName()); - break; - } - } - } catch (ClassNotFoundException ex) { - java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); - } catch (InstantiationException ex) { - java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); - } catch (IllegalAccessException ex) { - java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); - } catch (javax.swing.UnsupportedLookAndFeelException ex) { - java.util.logging.Logger.getLogger(AddImageErrorsDialog.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); - } - // - /* Create and display the dialog */ - java.awt.EventQueue.invokeLater(new Runnable() { - public void run() { - AddImageErrorsDialog dialog = new AddImageErrorsDialog(new javax.swing.JFrame(), true); - dialog.addWindowListener(new java.awt.event.WindowAdapter() { - @Override - public void windowClosing(java.awt.event.WindowEvent e) { - System.exit(0); - } - }); - dialog.setVisible(true); - } - }); - } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton closeButton; private javax.swing.JButton copyButton; diff --git a/Case/src/org/sleuthkit/autopsy/casemodule/AddImageVisualPanel2.java b/Case/src/org/sleuthkit/autopsy/casemodule/AddImageVisualPanel2.java index 244fcaafaf..b952e13173 100644 --- a/Case/src/org/sleuthkit/autopsy/casemodule/AddImageVisualPanel2.java +++ b/Case/src/org/sleuthkit/autopsy/casemodule/AddImageVisualPanel2.java @@ -99,6 +99,7 @@ final class AddImageVisualPanel2 extends JPanel { void setErrors(final String errors, boolean critical) { + crDbProgressBar.setValue(100); //always invoked when process completed if (critical) { progressLabel.setText("*Failed to add image (critical errors encountered). 
Click below to view the Add Image Log."); } diff --git a/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java index 22a174eeb3..2bfef28bcb 100644 --- a/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java +++ b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DataResultFilterNode.java @@ -147,6 +147,7 @@ public class DataResultFilterNode extends FilterNode{ public List visit(ImageNode img) { List actions = new ArrayList(); actions.add(new NewWindowViewAction("View in New Window", img)); + actions.add(new FileSearchAction("Open File Search")); actions.addAll(ShowDetailActionVisitor.getActions(img.getLookup().lookup(Content.class))); return actions; } diff --git a/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java index b1ae1ef70c..db07d38048 100644 --- a/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java +++ b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterNode.java @@ -71,6 +71,7 @@ class DirectoryTreeFilterNode extends FilterNode { } final Image img = this.getLookup().lookup(Image.class); if (img != null) { + actions.add(new FileSearchAction("Open File Search")); actions.add(new AbstractAction("Restart Ingest Modules") { @Override diff --git a/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/FileSearchAction.java b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/FileSearchAction.java new file mode 100644 index 0000000000..b223aa76a7 --- /dev/null +++ b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/FileSearchAction.java @@ -0,0 +1,40 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.directorytree; + +import java.awt.event.ActionEvent; +import javax.swing.AbstractAction; +import org.openide.util.Lookup; + +/** + * + * @author dfickling + */ +public class FileSearchAction extends AbstractAction{ + + public FileSearchAction(String title) { + super(title); + } + @Override + public void actionPerformed(ActionEvent e) { + FileSearchProvider searcher = Lookup.getDefault().lookup(FileSearchProvider.class); + searcher.showDialog(); + } + +} diff --git a/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/FileSearchProvider.java b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/FileSearchProvider.java new file mode 100644 index 0000000000..5f6dc113ba --- /dev/null +++ b/DirectoryTree/src/org/sleuthkit/autopsy/directorytree/FileSearchProvider.java @@ -0,0 +1,26 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.directorytree; + +/** + * Lookup interface for File Search (to deal with circular deps) + */ +public interface FileSearchProvider { + public void showDialog(); +} diff --git a/FileSearch/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java b/FileSearch/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java index cbe1540fd4..248ae85309 100644 --- a/FileSearch/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java +++ b/FileSearch/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java @@ -24,8 +24,11 @@ import java.beans.PropertyChangeListener; import org.openide.util.HelpCtx; import org.openide.util.actions.CallableSystemAction; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.directorytree.FileSearchProvider; -public final class FileSearchAction extends CallableSystemAction { +public final class FileSearchAction extends CallableSystemAction implements FileSearchProvider{ + + private static FileSearchAction instance = null; FileSearchAction() { super(); @@ -43,6 +46,13 @@ public final class FileSearchAction extends CallableSystemAction { }); } + public static FileSearchAction getDefault() { + if(instance == null){ + instance = new FileSearchAction(); + } + return instance; + } + @Override public void actionPerformed(ActionEvent e) { @@ -68,4 +78,9 @@ public final class FileSearchAction extends CallableSystemAction { protected boolean asynchronous() { return false; } + + @Override + public void showDialog() { + performAction(); + } } diff --git a/FileSearch/src/org/sleuthkit/autopsy/filesearch/layer.xml b/FileSearch/src/org/sleuthkit/autopsy/filesearch/layer.xml index c5fa2b2bdf..2982f0339d 100644 --- a/FileSearch/src/org/sleuthkit/autopsy/filesearch/layer.xml +++ b/FileSearch/src/org/sleuthkit/autopsy/filesearch/layer.xml @@ -13,6 +13,11 @@ + + + + + diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/Bundle.properties b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/Bundle.properties index 800c622f5c..580b129879 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/Bundle.properties +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/Bundle.properties @@ -7,7 +7,7 @@ HashDbSimplePanel.knownValLabel.text=- HashDbSimplePanel.notableValLabel.text=- HashDbMgmtPanel.addNotableButton.text=Add Known Bad Database HashDbMgmtPanel.removeNotableButton.text=Remove Selected -HashDbSimplePanel.jLabel1.text=Known Bad Database(s): +HashDbSimplePanel.jLabel1.text=Select known bad databases to use during ingest: HashDbSimplePanel.jLabel2.text=NSRL Database: HashDbMgmtPanel.nsrlNameLabel.text=Not Configured HashDbMgmtPanel.setNSRLButton.text=Change diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestService.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestService.java index a847bcc057..02b84b5c29 100644 --- 
a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestService.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestService.java @@ -293,7 +293,8 @@ public class HashDbIngestService implements IngestServiceAbstractFile { ProcessResult ret = ProcessResult.UNKNOWN; boolean processFile = true; - if (fsContent.getKnown().equals(TskData.FileKnown.BAD)) { + if (fsContent.getSize() == 0 + || fsContent.getKnown().equals(TskData.FileKnown.BAD)) { ret = ProcessResult.OK; processFile = false; } diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.form b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.form index ac8c4ab233..eddde087ed 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.form +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.form @@ -1,4 +1,4 @@ - +
@@ -16,18 +16,16 @@ - + + + - - - - - - + + + + - - diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.java index 706094616c..a8ec3ddd72 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbSimplePanel.java @@ -119,16 +119,14 @@ public class HashDbSimplePanel extends javax.swing.JPanel { this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(jScrollPane1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) + .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) .addGroup(layout.createSequentialGroup() .addContainerGap() - .addComponent(jLabel2) - .addContainerGap(51, Short.MAX_VALUE)) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addComponent(jLabel1) - .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) - .addComponent(jScrollPane1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 139, Short.MAX_VALUE) - .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 139, Short.MAX_VALUE) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(jLabel2) + .addComponent(jLabel1)) + .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/IndexStatus.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/IndexStatus.java index 2500f21ee4..b29fd237e9 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/IndexStatus.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/IndexStatus.java @@ -28,27 +28,27 @@ enum IndexStatus { /** * The index and database both exist, and the index is older. */ - INDEX_OUTDATED("Index is older than database."), + INDEX_OUTDATED("Index is older than database"), /** * The index and database both exist, and the index is not older. */ - INDEX_CURRENT("Database has index."), + INDEX_CURRENT("Database and index exist"), /** * The index exists but the database does not. */ - NO_DB("Only an index exists."), + NO_DB("Index exists (no database)"), /** * The database exists but the index does not. */ - NO_INDEX("Database does not have index."), + NO_INDEX("Index does not exist"), /** * Neither the index nor the database exists. 
*/ - NONE("No index or database."), + NONE("No index or database"), /** * The index is currently being generated */ - INDEXING("The index is currently being generated"); + INDEXING("Index is currently being generated"); private String message; diff --git a/Ingest/src/org/sleuthkit/autopsy/ingest/GetAllFilesContentVisitor.java b/Ingest/src/org/sleuthkit/autopsy/ingest/GetAllFilesContentVisitor.java index 1f751d2cf0..2a9c912991 100644 --- a/Ingest/src/org/sleuthkit/autopsy/ingest/GetAllFilesContentVisitor.java +++ b/Ingest/src/org/sleuthkit/autopsy/ingest/GetAllFilesContentVisitor.java @@ -71,7 +71,7 @@ class GetAllFilesContentVisitor extends GetFilesContentVisitor { StringBuilder queryB = new StringBuilder(); queryB.append("SELECT * FROM tsk_files WHERE ( (fs_obj_id = ").append(fs.getId()); - queryB.append(") OR (fs_obj_id = NULL) ) AND (size > 0)"); + queryB.append(") OR (fs_obj_id = NULL) )"); queryB.append(" AND ( (meta_type = ").append(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getMetaType()); queryB.append(") OR (meta_type = ").append(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getMetaType()); queryB.append( " AND (name != '.') AND (name != '..')"); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java index d44fafceff..7cb62bbfd3 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractedContentViewer.java @@ -365,6 +365,8 @@ public class ExtractedContentViewer implements DataContentViewer { return false; } + if (content.getSize() == 0) + return false; final Server solrServer = KeywordSearch.getServer(); @@ -375,8 +377,6 @@ public class ExtractedContentViewer implements DataContentViewer { final long contentID = content.getId(); - - try { return solrServer.queryIsIndexed(contentID); } catch (NoOpenCoreException ex) { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Ingester.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Ingester.java index 883a6830b8..d95a8f6649 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Ingester.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Ingester.java @@ -139,13 +139,15 @@ public class Ingester { /** * Sends a file to Solr to have its content extracted and added to the * index. commit() should be called once you're done ingesting files. + * If the file is a directory or ingestContent is set to false, the file name is indexed only. * - * @param f File to ingest + * @param fsContent File to ingest + * @param ingestContent if true, index the file and the content, otherwise indesx metadata only * @throws IngesterException if there was an error processing a specific * file, but the Solr server is probably fine. */ - void ingest(FsContent fsContent) throws IngesterException { - if (fsContent.isDir() ) { + void ingest(FsContent fsContent, boolean ingestContent) throws IngesterException { + if (fsContent.isDir() || ingestContent == false ) { ingest(new NullContentStream(fsContent), getContentFields(fsContent), 0); } else { @@ -438,25 +440,20 @@ public class Ingester { } /** - * Determine if the file is ingestible/indexable by keyword search + * Determine if the file content is ingestible/indexable by keyword search * Ingestible abstract file is either a directory, or an allocated file with supported extensions. 
* Note: currently only checks by extension and abstract type, it does not check actual file content. * @param aFile * @return true if it is ingestible, false otherwise */ static boolean isIngestible(AbstractFile aFile) { - boolean isIngestible = false; - TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); - if (aType.equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) - || aType.equals(TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) - return isIngestible; + if (! aType.equals(TSK_DB_FILES_TYPE_ENUM.FS) ) + return false; FsContent fsContent = (FsContent) aFile; - if (fsContent.isDir()) - //we index dir name, not content - return true; + boolean isIngestible = false; final String fileName = fsContent.getName(); for (final String ext : ingestibleExtensions) { if (fileName.toLowerCase().endsWith(ext)) { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Keyword.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Keyword.java index ba3b0182fe..99524a700f 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Keyword.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Keyword.java @@ -56,6 +56,13 @@ public class Keyword { return isLiteral; } + @Override + public String toString() { + return "Keyword{" + "query=" + query + ", isLiteral=" + isLiteral + ", keywordType=" + keywordType + '}'; + } + + + @Override public boolean equals(Object obj) { if (obj == null) { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.form b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.form index dcf2fe6e1d..39daf044ad 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.form +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.form @@ -22,12 +22,12 @@ - - + + - + @@ -74,6 +74,9 @@ + + + diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java index a97b1c635d..859c80e3b3 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java @@ -71,6 +71,7 @@ public class KeywordSearchConfigurationPanel2 extends javax.swing.JPanel { filesIndexedLabel.setText(org.openide.util.NbBundle.getMessage(KeywordSearchConfigurationPanel2.class, "KeywordSearchConfigurationPanel2.filesIndexedLabel.text")); // NOI18N filesIndexedValue.setText(org.openide.util.NbBundle.getMessage(KeywordSearchConfigurationPanel2.class, "KeywordSearchConfigurationPanel2.filesIndexedValue.text")); // NOI18N + filesIndexedValue.setMaximumSize(null); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); @@ -82,10 +83,10 @@ public class KeywordSearchConfigurationPanel2 extends javax.swing.JPanel { .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(filesIndexedLabel) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(filesIndexedValue, javax.swing.GroupLayout.PREFERRED_SIZE, 26, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(filesIndexedValue, javax.swing.GroupLayout.PREFERRED_SIZE, 104, javax.swing.GroupLayout.PREFERRED_SIZE)) 
.addComponent(skipNSRLCheckBox)) - .addContainerGap()) + .addContainerGap(165, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -97,7 +98,7 @@ .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(filesIndexedLabel) - .addComponent(filesIndexedValue)) + .addComponent(filesIndexedValue, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap(226, Short.MAX_VALUE)) ); }// //GEN-END:initComponents diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestService.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestService.java index 58ee46940f..d3ff1b367b 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestService.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestService.java @@ -38,6 +38,7 @@ import org.apache.solr.client.solrj.SolrServerException; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; import org.openide.util.Cancellable; +import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestManagerProxy; @@ -55,14 +56,13 @@ import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskData; /** - * An ingest service on a file level - * Performs indexing of allocated and Solr supported files, - * string extraction and indexing of unallocated and not Solr supported files - * Index commit is done periodically (determined by user set ingest update interval) - * Runs a periodic keyword / regular expression search on currently configured lists for ingest - * and writes results to blackboard + * An ingest service on a file level. Performs indexing of allocated and Solr + * supported files, string extraction and indexing of unallocated and not Solr + * supported files. Index commit is done periodically (determined by user set + * ingest update interval). Runs a periodic keyword / regular expression search + * on currently configured lists for ingest and writes results to blackboard. * Reports interesting events to Inbox and to viewers - * + * * Registered as a service in layer.xml */ public final class KeywordSearchIngestService implements IngestServiceAbstractFile { @@ -92,19 +92,20 @@ private volatile int messageID = 0; private boolean processedFiles; private volatile boolean finalSearcherDone = true; - private final String hashDBServiceName = "Hash Lookup"; + private final String hashDBServiceName = "Hash Lookup"; //NOTE this needs to match the HashDB service getName() private SleuthkitCase caseHandle = null; private boolean skipKnown = true; boolean initialized = false; private enum IngestStatus { - INGESTED, EXTRACTED_INGESTED, SKIPPED, + INGESTED, EXTRACTED_INGESTED, SKIPPED, INGESTED_META }; private Map ingestStatus; /** * Returns singleton instance of the service, creates one if needed + * * @return instance of the service */ public static synchronized KeywordSearchIngestService getDefault() { @@ -115,10 +116,12 @@ public final class KeywordSearchIngestService implements 
IngestServiceAbstractFi } /** - * Starts processing of every file provided by IngestManager. - * Checks if it is time to commit and run search + * Starts processing of every file provided by IngestManager. Checks if it + * is time to commit and run search + * * @param abstractFile file/unallocated file/directory to process - * @return ProcessResult.OK in most cases and ERROR only if error in the pipeline, otherwise does not advice to stop the pipeline + * @return ProcessResult.OK in most cases and ERROR only if error in the + * pipeline, otherwise does not advise to stop the pipeline */ @Override public ProcessResult process(AbstractFile abstractFile) { @@ -133,8 +136,12 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi IngestServiceAbstractFile.ProcessResult hashDBResult = managerProxy.getAbstractFileServiceResult(hashDBServiceName); //logger.log(Level.INFO, "hashdb result: " + hashDBResult + "file: " + AbstractFile.getName()); if (hashDBResult == IngestServiceAbstractFile.ProcessResult.COND_STOP && skipKnown) { + //index meta-data only + indexer.indexFile(abstractFile, false); return ProcessResult.OK; } else if (hashDBResult == IngestServiceAbstractFile.ProcessResult.ERROR) { + //index meta-data only + indexer.indexFile(abstractFile, false); //notify depending service that keyword search (would) encountered error for this file return ProcessResult.ERROR; } @@ -145,7 +152,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi checkRunCommitSearch(); - indexer.indexFile(abstractFile); + //index the file and content (if the content is supported) + indexer.indexFile(abstractFile, true); return ProcessResult.OK; } @@ -196,8 +204,7 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi } /** - * Handle stop event (ingest interrupted) - * Cleanup resources, threads, timers + * Handle stop event (ingest interrupted). Cleanup resources, threads, timers */ @Override public void stop() { @@ -234,9 +241,10 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi } /** - * Initializes the service for new ingest run - * Sets up threads, timers, retrieves settings, keyword lists to run on - * @param managerProxy + * Initializes the service for a new ingest run. Sets up threads, timers, + * retrieves settings, keyword lists to run on + * + * @param managerProxy */ @Override public void init(IngestManagerProxy managerProxy) { @@ -320,9 +328,11 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi } /** - * The services maintains background threads, return true if background threads are running - * or there are pending tasks to be run in the future, such as the final search post-ingest completion - * @return + * The service maintains background threads; return true if background + * threads are running or there are pending tasks to be run in the future, + * such as the final search post-ingest completion + * + * @return */ @Override public boolean hasBackgroundJobsRunning() { @@ -353,6 +363,7 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi */ private void postIndexSummary() { int indexed = 0; + int indexed_meta = 0; int indexed_extr = 0; int skipped = 0; for (IngestStatus s : ingestStatus.values()) { @@ -360,6 +371,9 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi case INGESTED: ++indexed; break; + case INGESTED_META: + ++indexed_meta; + break; case EXTRACTED_INGESTED: ++indexed_extr; 
break; @@ -373,6 +387,7 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi StringBuilder msg = new StringBuilder(); msg.append("Indexed files: ").append(indexed).append("<br />Indexed strings: ").append(indexed_extr); + msg.append("<br />Indexed meta-data only: ").append(indexed_meta).append("<br />"); msg.append("<br />Skipped files: ").append(skipped).append("<br />
"); String indexStats = msg.toString(); logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); @@ -423,8 +438,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi } /** - * Check if time to commit, if so, run commit. - * Then run search if search timer is also set. + * Check if time to commit, if so, run commit. Then run search if search + * timer is also set. */ void checkRunCommitSearch() { if (commitIndex) { @@ -446,8 +461,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi } /** - * CommitTimerAction to run by commitTimer - * Sets a flag to indicate we are ready for commit + * CommitTimerAction to run by commitTimer Sets a flag to indicate we are + * ready for commit */ private class CommitTimerAction implements ActionListener { @@ -461,8 +476,8 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi } /** - * SearchTimerAction to run by searchTimer - * Sets a flag to indicate we are ready to search + * SearchTimerAction to run by searchTimer Sets a flag to indicate we are + * ready to search */ private class SearchTimerAction implements ActionListener { @@ -477,7 +492,7 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi /** * File indexer, processes and indexes known/allocated files, - * unknown/unallocated files and directories accordingly + * unknown/unallocated files and directories accordingly */ private class Indexer { @@ -495,42 +510,70 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi return indexed; } - private void indexFile(AbstractFile aFile) { + private void indexFile(AbstractFile aFile, boolean indexContent) { //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName()); - boolean ingestibleFile = Ingester.isIngestible(aFile); - final long size = aFile.getSize(); - //limit size of entire file, do not limit strings - if (size == 0 || (ingestibleFile && size > MAX_INDEX_SIZE)) { - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); + FsContent fsContent = null; + //check if alloc fs file or dir + TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); + if (aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) { + fsContent = (FsContent) aFile; + } + + //if alloc fs file and not index content, or a dir, index meta data only + if (fsContent != null + && (indexContent == false || fsContent.isDir())) { + try { + ingester.ingest(fsContent, false); //meta-data only + ingestStatus.put(aFile.getId(), IngestStatus.INGESTED_META); + } catch (IngesterException ex) { + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); + logger.log(Level.WARNING, "Unable to index meta-data for fsContent: " + fsContent.getId(), ex); + } + return; } - if (ingestibleFile == true) { - //we know it's an allocated file or dir (FsContent) - FsContent fileDir = (FsContent) aFile; + boolean ingestibleFile = Ingester.isIngestible(aFile); + + final long size = aFile.getSize(); + //if fs file, limit size of entire file, do not limit strings + if (fsContent != null && (size == 0 || (ingestibleFile && size > MAX_INDEX_SIZE))) { + //if fs file, index meta only, otherwise if unalloc, skip + try { + ingester.ingest(fsContent, false); //meta-data only + ingestStatus.put(aFile.getId(), IngestStatus.INGESTED_META); + } catch (IngesterException ex) { + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); + logger.log(Level.WARNING, "Unable to index meta-data for fsContent: " + fsContent.getId(), ex); + } + + return; + } + + if 
(fsContent != null && ingestibleFile == true) { + //we know it's an allocated fs file (FsContent) with supported content try { //logger.log(Level.INFO, "indexing: " + fsContent.getName()); - ingester.ingest(fileDir); - ingestStatus.put(fileDir.getId(), IngestStatus.INGESTED); + ingester.ingest(fsContent, true); + ingestStatus.put(fsContent.getId(), IngestStatus.INGESTED); } catch (IngesterException e) { - ingestStatus.put(fileDir.getId(), IngestStatus.SKIPPED); - //try to extract strings if not a dir - if (fileDir.isFile() == true) { - processNonIngestible(fileDir); + ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED); + //try to extract strings, if a file + if (fsContent.isFile() == true) { + processNonIngestible(fsContent); } } catch (Exception e) { - ingestStatus.put(fileDir.getId(), IngestStatus.SKIPPED); - //try to extract strings if not a dir - if (fileDir.isFile() == true) { - processNonIngestible(fileDir); + ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED); + //try to extract strings if a file + if (fsContent.isFile() == true) { + processNonIngestible(fsContent); } } } else { - //unallocated or unsupported type by Solr + //unallocated file or unsupported content type by Solr processNonIngestible(aFile); - } } @@ -547,10 +590,10 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi } /** - * Searcher responsible for searching the current index and writing results to blackboard - * and the inbox. Also, posts results to listeners as Ingest data events. - * Searches entire index, and keeps track of only new results to report and save. - * Runs as a background thread. + * Searcher responsible for searching the current index and writing results + * to blackboard and the inbox. Also, posts results to listeners as Ingest + * data events. Searches entire index, and keeps track of only new results + * to report and save. Runs as a background thread. */ private class Searcher extends SwingWorker { @@ -574,7 +617,6 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi final String displayName = "Keyword Search" + (finalRun ? 
" - Finalizing" : ""); progress = ProgressHandleFactory.createHandle(displayName + (" (Pending)"), new Cancellable() { - @Override public boolean cancel() { logger.log(Level.INFO, "Cancelling the searcher by user."); @@ -833,14 +875,14 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi //without relying on done() method that is not guaranteed to run after background thread completes //NEED to call this method always right before doInBackground() returns /** - * Performs the cleanup that needs to be done right AFTER doInBackground() returns - * without relying on done() method that is not guaranteed to run after background thread completes - * REQUIRED to call this method always right before doInBackground() returns + * Performs the cleanup that needs to be done right AFTER + * doInBackground() returns without relying on done() method that is not + * guaranteed to run after background thread completes REQUIRED to call + * this method always right before doInBackground() returns */ private void finalizeSearcher() { logger.log(Level.INFO, "Searcher finalizing"); SwingUtilities.invokeLater(new Runnable() { - @Override public void run() { progress.finish(); @@ -871,9 +913,9 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi /** * Checks if the content has already been hit previously - * + * * @param previousHits the previous hits to check against - * @param hit a hit to check for, that potentially had already been hit + * @param hit a hit to check for, that potentially had already been hit * @return true if the potential hit has already been hit, false otherwise */ private static boolean previouslyHit(List previousHits, ContentHit hit) { @@ -890,7 +932,9 @@ public final class KeywordSearchIngestService implements IngestServiceAbstractFi /** * Set the skip known files setting on the service - * @param skip true if skip, otherwise, will process known files as well, as reported by HashDB service + * + * @param skip true if skip, otherwise, will process known files as well, as + * reported by HashDB service */ void setSkipKnown(boolean skip) { this.skipKnown = skip; diff --git a/Report/src/org/sleuthkit/autopsy/report/Report.java b/Report/src/org/sleuthkit/autopsy/report/Report.java index e21fd6da26..916826a618 100644 --- a/Report/src/org/sleuthkit/autopsy/report/Report.java +++ b/Report/src/org/sleuthkit/autopsy/report/Report.java @@ -124,7 +124,7 @@ public class Report { tempdbconnect.executeStmt("DROP TABLE IF EXISTS report_hashset;"); tempdbconnect.executeStmt("DROP TABLE IF EXISTS report_hashname;"); tempdbconnect.executeStmt("DROP TABLE IF EXISTS report_hash;"); - String temp1 = "CREATE TABLE report_hashset AS SELECT value_text as hashset,blackboard_attributes.attribute_type_id, blackboard_attributes.artifact_id FROM blackboard_attributes WHERE attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HASHSET_NAME.getTypeID() + ";"; + String temp1 = "CREATE TABLE report_hashset AS SELECT value_text as hashset,blackboard_attributes.attribute_type_id, blackboard_attributes.artifact_id FROM blackboard_attributes WHERE attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + ";"; String temp5 = "CREATE TABLE report_hashname AS SELECT name, size, report_hashset.artifact_id from tsk_files,blackboard_artifacts, report_hashset WHERE blackboard_artifacts.artifact_id = report_hashset.artifact_id AND blackboard_artifacts.obj_id = tsk_files.obj_id;"; String temp6 = "CREATE TABLE report_hash AS SELECT 
hashset,size,name from report_hashset INNER JOIN report_hashname ON report_hashset.artifact_id=report_hashname.artifact_id;"; tempdbconnect.executeStmt(temp1); @@ -215,7 +215,7 @@ public class Report { table.append(""); } table.append(""); - table.append("").append(""); + table.append("").append(""); SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy/MM/dd HH:mm:ss"); String value = sdf.format(new java.util.Date(uniqueresults.getLong("date") * 1000)); table.append(""); diff --git a/Report/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Report/src/org/sleuthkit/autopsy/report/ReportHTML.java index 9882e30302..234bb4af3f 100644 --- a/Report/src/org/sleuthkit/autopsy/report/ReportHTML.java +++ b/Report/src/org/sleuthkit/autopsy/report/ReportHTML.java @@ -246,7 +246,7 @@ public class ReportHTML implements ReportModule { StringBuilder nodeKeyword = new StringBuilder("

Keyword Search Hits (").append(countKeyword).append(")

"); StringBuilder nodeHash = new StringBuilder("

Hashset Hit (").append(countHash).append(")

"); StringBuilder nodeDevice = new StringBuilder("

Attached Devices (").append(countHash).append(")

").append(tableHeader).append(""); - StringBuilder nodeEmail = new StringBuilder("

Email Messages (").append(countHash).append(")

"); + StringBuilder nodeEmail = new StringBuilder("

Email Messages (").append(countEmail).append(")

"); int alt = 0; String altRow = ""; diff --git a/build.xml b/build.xml index 35d073e7c3..f14393f080 100644 --- a/build.xml +++ b/build.xml @@ -69,7 +69,7 @@ - + @@ -84,11 +84,17 @@ + + + + + + @@ -199,22 +205,8 @@ - + - - - - - - - - - - - - - diff --git a/docs/doxygen/design.dox b/docs/doxygen/design.dox index e5c8b43828..cd5bba97dd 100644 --- a/docs/doxygen/design.dox +++ b/docs/doxygen/design.dox @@ -1,18 +1,224 @@ /*! \page design_page General Design \section design_overview Overview +This section outlines Autopsy design from the typical analysis work flow perspective. +A typical Autopsy work flow consists of the following steps: -Talk about the various pieces and how things are working behind the scenes (in order of anlaysis). -- Wizards are used to create case and images (all from org.sleuthkit.autopsy.casemodule) -- DB is created -- Ingest modules are run (org.sleuthkit.autopsy.ingest.IngestManager) -- Ingest modules post results to the blackboard and inbox -- Tree displays blackboard contents -- Data is encapsulated into nodes and passed to table and content viewers -- Reports can be generated +- Wizards are used to create case and images (org.sleuthkit.autopsy.casemodule), +- TSK database is created, +- Ingest modules are run (org.sleuthkit.autopsy.ingest), +- Ingest modules post results to the blackboard and ingest inbox, +- Directory tree displays blackboard contents, +- Data is encapsulated into nodes and passed to table and content viewers, +- Reports can be generated. -\subsection design_overview_sub Sub-Section -This was added for reference. +\subsection design_overview_sub1 Creating a case -*/ +The first step in Autopsy work flow is creating a case. +User is guided with the case creation wizard (invoked by org.sleuthkit.autopsy.casemodule.NewCaseWizardAction) to enter the case name, base directory and optional case information. +The base directory is the directory where all files associated with the case are stored. +The directory is self contained (besides referenced images files, which are stored separately) and could be later moved to another location or another machine by the user (along with the linked image files). +The case directory contains: +- a newly created, empty case SQLite TSK database, autopsy.db, +- a case XML configuration file, named after the case name and .aut extension, +- directory structure for temporary files, case log files, cache files, and module specific files. +An example of module-specific directory is keywordsearch directory, created by the Keyword Search module. + +After case is created, currentCase singleton member variable in Case class is updated. +It contains access to higher-level case information stored in the case XML file. + +org.sleuthkit.autopsy.casemodule.Case class also contains support for case events; events are sent to registered listeners when new case is created, opened, changed or closed. +When a case is changed or created, also updated is org.sleuthkit.datamodel.SleuthkitCase handle to the TSK database. +SleuthkitCase contains a handle to org.sleuthkit.datamodel.SleuthkitJNI object, through which native sleuthkit API can be accessed. + + +\subsection design_overview_sub2 Adding an image +After case in created, user is guided to add an image to the case using the wizard invoked by org.sleuthkit.autopsy.casemodule.AddImageAction. +org.sleuthkit.autopsy.casemodule.AddImageWizardIterator instantiates and manages the wizard panels (there are currently 4 of them). 
+ +The user enters image information in the first panel org.sleuthkit.autopsy.casemodule.AddImageWizardPanel1 (image path, image timezone and additional options). + +In the subsequent panel, org.sleuthkit.autopsy.casemodule.AddImageWizardPanel2, a background worker thread is spawned in AddImgTask. + +Work is delegated to org.sleuthkit.datamodel.AddImageProcess, which calls native sleuthkit methods via SleuthkitJNI to: initialize, run and commit the new image. +The entire process is enclosed within a database transaction and the transaction is not committed until the user finalizes the process in org.sleuthkit.autopsy.casemodule.AddImageWizardPanel3. +The user can also interrupt the ongoing add image process, which results in a stop call in sleuthkit. The call sets a special stop flag. The flag is periodically checked by sleuthkit code and, + if set, it results in breaking out of any current processing methods and loops and returning from sleuthkit. +The worker thread in Autopsy then terminates and revert is called to back out of the current transaction. + +The actual work in the add image process is done in the native sleuthkit library. +The library reads the image and populates the TSK SQLite database with the image meta-data. +Rows are inserted into the following tables: +- tsk_objects (all content objects are given their unique object IDs and are associated with parents), +- tsk_file_layout (for file block information, such as for "special" files representing unallocated data), +- tsk_image_info, tsk_image_names (to store image info, such as local image paths, block size and time zone), +- tsk_vs_info (to store volume system information), +- tsk_vs_parts (to store volume information), +- tsk_fs_info (to store file system information), +- tsk_files (to store all files and directories discovered and their attributes). + +After the image has been processed successfully and the user has confirmed, the transaction is committed to the database. + +Errors from processing the image in sleuthkit are propagated using org.sleuthkit.datamodel.TskCoreException and org.sleuthkit.datamodel.TskDataException java exceptions. +The errors are logged and can be reviewed by the user from the wizard. +org.sleuthkit.datamodel.TskCoreException is handled by the wizard as a critical, unrecoverable error condition with TSK core, resulting in the interruption of the add image process. +org.sleuthkit.datamodel.TskDataException, pertaining to an error associated with the data itself (such as an invalid volume offset), is treated as a warning - the process continues because there is likely image data that can still be read. + +\subsection design_overview_sub3 Concurrency + +Autopsy is a highly multi-threaded application; besides threads associated with the GUI, event dispatching and Netbeans RCP, the application uses threads to support concurrent user-driven processes. +For instance, multiple image ingest services can be run at the same time. In addition, the user can add another image to the database while ingest is running on previously added images. +During the add image process, a database lock is acquired using org.sleuthkit.autopsy.casemodule.SleuthkitCase.dbWriteLock() to ensure exclusive access to the database resource. +Once the lock is acquired by the add image process, other Autopsy threads trying to access the database (such as ingest modules) will block on acquiring the lock for the duration of the add image process. 
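+A minimal sketch of this locking discipline, using only the lock methods named in this section (the try/finally shape is an assumption of good practice, not a prescribed API):
+\code
+SleuthkitCase.dbWriteLock();
+try {
+    // write to the case database (e.g. insert rows during the add image process)
+} finally {
+    SleuthkitCase.dbWriteUnlock();
+}
+\endcode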
+The database lock is implemented with the SQLite database in mind, which does not support concurrent writes. The database lock is released with org.sleuthkit.autopsy.casemodule.SleuthkitCase.dbWriteUnlock() when the add image process has ended. +The database lock is used for all database access methods in org.sleuthkit.autopsy.casemodule.SleuthkitCase. + +\subsection design_overview_sub4 Running ingest modules + +The user has an option to run ingest modules after the image has been added using the wizard, and, optionally, +ingest modules can be run or re-run at any time. + +Ingest modules (also referred to as ingest services) are designed as plugins that are separate from the Autopsy core. +Ingest modules can be added to an existing Autopsy installation as jar files and they will be automatically recognized the next time Autopsy starts. + +Every module generally has its own specific role. The two main use cases for ingest modules are: +- to extract information from the image and write results to the blackboard, +- to analyze data already in the blackboard and add more information to it. + +There may also be special-purpose ingest modules that run early in the ingest pipeline. Results posted by such modules can be useful to subsequent modules. +One example of such a module is the Hash DB module, which determines which files are known; known files are generally treated differently. +For instance, processing of known files can be skipped by subsequent modules in the pipeline (if so configured), for performance reasons. + +Autopsy provides an ingest module framework in the org.sleuthkit.autopsy.ingest package, located in a separate module. +The framework provides interfaces every ingest module needs to implement: +- org.sleuthkit.autopsy.ingest.IngestServiceImage (for modules that are interested in the image as a whole, or in picking only specific data of interest from the image), +- org.sleuthkit.autopsy.ingest.IngestServiceAbstractFile (for modules that need to process every file). + +The interfaces define methods to initialize, process passed-in data, configure the ingest service, query the service state and finalize the service. + +The framework also contains classes: +- org.sleuthkit.autopsy.ingest.IngestManager, the ingest manager, responsible for discovery of ingest modules, enqueuing work to the modules, starting and stopping the ingest pipeline, and +propagating messages sent from the ingest modules to other Autopsy components, +- org.sleuthkit.autopsy.ingest.IngestManagerProxy, a facility used by the modules to communicate with the manager, +- additional classes to support threading, sending messages, ingest monitoring, ingest cancellation, progress bars, +- a user interface component (Ingest Inbox) used to display interesting messages posted by ingest modules to the user. + +To implement an ingest module it is required to implement one of the interfaces (for file or image ingest) +and to have the module register itself using the Netbeans Lookup infrastructure in the layer.xml file. +Please refer to ingest.dox, the org.sleuthkit.autopsy.ingest package API and the org.sleuthkit.autopsy.ingest.example examples for more details on implementing custom ingest modules. + +Most ingest modules typically require configuration before they are executed. +The configuration methods are defined in the ingest module interfaces. +Module configuration is decentralized and module-specific; every module maintains its + own configuration state and is responsible for implementing its own JPanels to render + and present the configuration to the user. 
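+For orientation, the following abridged sketch shows the shape of a file-level ingest service; only methods that appear elsewhere in this document are shown, and the remaining lifecycle and configuration methods required by the interface are omitted here:
+\code
+public final class ExampleIngestService implements IngestServiceAbstractFile {
+
+    private static ExampleIngestService instance;
+    private IngestManagerProxy managerProxy;
+
+    // modules are conventionally singletons, discovered via layer.xml registration
+    public static synchronized ExampleIngestService getDefault() {
+        if (instance == null) {
+            instance = new ExampleIngestService();
+        }
+        return instance;
+    }
+
+    @Override
+    public String getName() {
+        return "Example Service";
+    }
+
+    @Override
+    public void init(IngestManagerProxy managerProxy) {
+        this.managerProxy = managerProxy; // retrieve settings, set up timers and threads
+    }
+
+    @Override
+    public ProcessResult process(AbstractFile abstractFile) {
+        // examine the file and post results to the blackboard
+        return ProcessResult.OK;
+    }
+
+    @Override
+    public void stop() {
+        // ingest interrupted: clean up resources, threads, timers
+    }
+
+    @Override
+    public boolean hasBackgroundJobsRunning() {
+        return false; // no internal worker threads in this example
+    }
+}
+\endcode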
There are method hooks defined in the ingest service interface that are used to hint the module when the configuration should be preserved. + +Ingest modules run in background threads. There is a single background thread for file-level ingest modules, within which every file ingest module runs in series for every file. +Image ingest modules each run in their own thread and thus can run in parallel (TODO we will change this in the future for performance reasons, and support image ingest module dependencies). +Every ingest thread is presented with a progress bar and can be cancelled by a user, or by the framework, in case of a critical event (such as Autopsy terminating, or a system error). +An ingest module can also implement its own internal threads for any special-purpose processing that can occur in parallel. +However, the module is then responsible for creating, managing and tearing down the internal threads. +An example of a module that maintains its own threads is the KeywordSearch module. + + +\subsection design_overview_sub5 Ingest modules posting results + +Ingest services, when running, provide real-time updates to the user +by periodically posting data results and messages to registered components. + +The timing as to when a service posts results data is module-implementation-specific. +In a simple case, a service may post new data as soon as the data is available +- the case for simple services that take a relatively short amount of time to execute and where new data is expected +to arrive in the order of seconds. + +Another possibility is to post data at fixed time intervals (e.g. for a service that takes minutes to produce results +and for a service that maintains internal threads to perform work). +There exists a global update setting that specifies the maximum time interval for the service to post data. +The user may adjust the interval for more frequent, real-time updates. Services that post data in periodic intervals should post their data according to this setting. +The setting is retrieved by the module using the getUpdateFrequency() method in the org.sleuthkit.autopsy.ingest.IngestManagerProxy class. + +Data event registration and posting: + +When an ingest service produces data, it writes it to the blackboard (as blackboard artifacts and associated attributes). + +A service should periodically notify listeners of newly available data by invoking the fireServiceDataEvent() method in the org.sleuthkit.autopsy.ingest.IngestManagerProxy class. +The method accepts an org.sleuthkit.autopsy.ingest.ServiceDataEvent parameter. +The parameter wraps a collection of blackboard artifacts and their associated attributes that are to be reported as the new data to listeners. +Passing the data as part of the event reduces memory footprint and decreases the number of garbage collections +of the blackboard artifact and attribute objects (the objects are expected to be reused by the data event listeners). + +The service name and artifact type for the collection of artifacts are also passed in as part of the event. +The artifacts passed in a single event should be of the same type, which is enforced by the org.sleuthkit.autopsy.ingest.ServiceDataEvent constructor. + +If a service has new data, but the service implementation does not include new artifact tracking, it is possible to pass only the service name and artifact type in the event. +The event listener may then choose to perform a blackboard query for the artifact type to retrieve all data of that type currently stored in the blackboard, including the new data. 
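+A sketch of posting a data event from inside a service. The ServiceDataEvent constructor arguments follow the description above (service name, artifact type, and optionally the collection of new artifacts); MODULE_NAME, the managerProxy field received in init(), and the getNewlyWrittenArtifacts() helper are placeholders:
+\code
+// inside an ingest service, after writing new artifacts to the blackboard:
+Collection<BlackboardArtifact> newArtifacts = getNewlyWrittenArtifacts(); // hypothetical helper
+managerProxy.fireServiceDataEvent(new ServiceDataEvent(MODULE_NAME,
+        BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
+\endcode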
+ +Service event listeners need to register themselves with the org.sleuthkit.autopsy.ingest.IngestManager directly, using the static addPropertyChangeListener() method. + +At the end of the ingest, org.sleuthkit.autopsy.ingest.IngestManager itself will notify all listeners of new data being available in the blackboard. +This ensures the listeners receive a new data notification, in case some of the modules fail to report availability of new data. +However, ingest module developers are encouraged to generate new data events in order to provide real-time feedback to the user. + +Ingest message registration and posting: +In addition to data events, ingest services should send ingest messages about interesting events. +Examples of such events include service status (started, stopped) or information about new data. +The messages include the source service, message subject, message details, a unique message id (in the context of the originating service) and a uniqueness attribute, used to group similar messages together and to determine the overall importance (priority) of the message. +A message group with a higher number of aggregate messages with the same uniqueness is considered a lower priority. + +Ingest messages have different types: there are info messages, warning messages, error messages and data messages. +The data messages contain encapsulated blackboard artifacts and attributes. The passed-in data is used by the ingest inbox GUI widget to navigate to the artifact view in the directory tree, if requested by the user. + +The ingest message API is defined in the org.sleuthkit.autopsy.ingest.IngestMessage class. The class also contains factory methods to create new messages. +Messages are posted using the org.sleuthkit.autopsy.ingest.IngestManagerProxy postMessage() method, which accepts a message created using one of the factory methods. + +The recipient of the ingest messages is the Ingest Inbox viewer widget component, from the org.sleuthkit.autopsy.ingest package. + + +\subsection design_overview_sub6 Result viewers (directory tree, table viewers, content viewers) + +The directory tree result viewer (in the left-hand panel of the Autopsy viewer) is the core results viewer for the results saved during the ingest process. + +The component is by default registered as an ingest message listener with the ingest manager. + +When Autopsy starts, the viewer queries the blackboard data and populates the UI. +During ingest, the viewer responds to data events by refreshing the data nodes corresponding to the artifact type in the data event. +When ingest is completed, the viewer responds to the final ingest data event generated by the ingest manager, +and performs a final refresh of all data nodes. + +Data is encapsulated in nodes (org.openide.nodes.Node) before it is displayed in the UI. +A node is an abstraction for a displayable data unit. +The nodes contain property sheets to store data and are organized in a parent-child hierarchy. +The hierarchy is used to visually represent the data and to trigger child view updates whenever the parent node is selected by the user. +Node child factories are invoked by the Netbeans framework at the time of parent node selection to create or refresh the child node view. + +Once a node is selected, its property sheet is rendered in the default table result viewer in the top-right part of the Autopsy UI. + +Nodes can also be registered with content viewers (bottom-right part of the Autopsy UI), as the sketch below illustrates. 
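+For illustration, a content viewer's support check can be as simple as querying the selected node's lookup for Content. The isSupported() signature here is assumed from the description that follows and from the node lookup pattern used elsewhere in this change set; the size check is an example policy, not a required one:
+\code
+@Override
+public boolean isSupported(Node node) {
+    Content content = node.getLookup().lookup(Content.class);
+    return content != null && content.getSize() > 0;
+}
+\endcode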
+Nodes use the node lookup infrastructure org.openide.util.Lookup to register their content viewer capabilities. + +When a new node is selected, org.sleuthkit.autopsy.corecomponents.DataContentTopComponent queries registered data content viewers to determine support for the given node content. +The specific content viewers query the node lookup to determine the content capability match and return a number ranking the degree of the viewer's support for the node type. +Based on the return values of the isSupported() and isPreferred() methods, the org.sleuthkit.autopsy.corecomponents.DataContentTopComponent enables or disables content viewers and selects a default active viewer for the node type. + + +\subsection design_overview_sub7 Report generation + +After ingest is run, the user can generate reports. +There are several types of reports implemented as submodules that are shipped with the Autopsy core: generic HTML, XML and Excel reports. +Each reporting submodule implements the org.sleuthkit.autopsy.report.ReportModule interface and registers itself in layer.xml. + +A reporting submodule typically interacts with three components: +- org.sleuthkit.autopsy.report.ReportConfiguration - to read the current reporting configuration set by the user, +- the Blackboard API in the org.sleuthkit.datamodel.SleuthkitCase class - to traverse and read blackboard artifacts and attributes, +- an API (possibly an external/third-party API) to convert blackboard artifact data structures to the desired reporting format. + +The Autopsy reporting module is in the org.sleuthkit.autopsy.report package. +Please refer to report.dox and the org.sleuthkit.autopsy.report package API documentation for more details on how to implement a custom reporting submodule. + + + + + + +*/ \ No newline at end of file diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestService.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestService.java index a3d0e3ae0c..1c892548cc 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestService.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestService.java @@ -85,9 +85,11 @@ public class ThunderbirdMboxFileIngestService implements IngestServiceAbstractFi } try { - byte[] t = new byte[(int) 128]; - int byteRead = fsContent.read(t, 0, 128); - isMbox = mbox.isValidMimeTypeMbox(t); + byte[] t = new byte[64]; + if(fsContent.getSize() > 64) { + int byteRead = fsContent.read(t, 0, 64); + isMbox = mbox.isValidMimeTypeMbox(t); + } } catch (TskException ex) { Logger.getLogger(ThunderbirdMboxFileIngestService.class.getName()).log(Level.WARNING, null, ex); }
").append("Folder").append("FromToSubjectDate/TimeContentCCBCCPath
").append(uniqueresults.getString("name")).append("").append(uniqueresults.getString("receiver")).append("").append(uniqueresults.getString("author")).append("
(").append(uniqueresults.getString("subject")).append(")").append("
").append(uniqueresults.getString("receiver")).append("").append(uniqueresults.getString("author")).append("").append(uniqueresults.getString("subject")).append("").append(value).append("NameSerial #Time