Merge pull request #5157 from sleuthkit/develop
Merge develop branch into the file-search feature branch
@@ -24,7 +24,6 @@ import java.awt.event.ActionListener;
import java.io.File;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.swing.JDialog;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.SwingWorker;
@@ -62,8 +61,6 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
private static final String DISPLAY_NAME = Bundle.CTL_CaseOpenAction();
private static final String PROP_BASECASE = "LBL_BaseCase_PATH"; //NON-NLS
private static final Logger LOGGER = Logger.getLogger(CaseOpenAction.class.getName());
private static JDialog multiUserCaseWindow;
private final JFileChooser fileChooser = new JFileChooser();
private final FileFilter caseMetadataFileFilter;

/**
@@ -74,13 +71,6 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
*/
public CaseOpenAction() {
caseMetadataFileFilter = new FileNameExtensionFilter(NbBundle.getMessage(CaseOpenAction.class, "CaseOpenAction.autFilter.title", Version.getName(), CaseMetadata.getFileExtension()), CaseMetadata.getFileExtension().substring(1));
fileChooser.setDragEnabled(false);
fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
fileChooser.setMultiSelectionEnabled(false);
fileChooser.setFileFilter(caseMetadataFileFilter);
if (null != ModuleSettings.getConfigSetting(ModuleSettings.MAIN_SETTINGS, PROP_BASECASE)) {
fileChooser.setCurrentDirectory(new File(ModuleSettings.getConfigSetting("Case", PROP_BASECASE))); //NON-NLS
}
}

/**
@@ -88,7 +78,16 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
* metadata file (.aut file). Upon confirming the selection, it will attempt
* to open the case described by the file.
*/
void openCaseSelectionWindow() {
JFileChooser fileChooser = new JFileChooser();
fileChooser.setDragEnabled(false);
fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
fileChooser.setMultiSelectionEnabled(false);
fileChooser.setFileFilter(caseMetadataFileFilter);
if (null != ModuleSettings.getConfigSetting(ModuleSettings.MAIN_SETTINGS, PROP_BASECASE)) {
fileChooser.setCurrentDirectory(new File(ModuleSettings.getConfigSetting("Case", PROP_BASECASE))); //NON-NLS
}

String optionsDlgTitle = NbBundle.getMessage(Case.class, "CloseCaseWhileIngesting.Warning.title");
String optionsDlgMessage = NbBundle.getMessage(Case.class, "CloseCaseWhileIngesting.Warning");
if (IngestRunningCheck.checkAndConfirmProceed(optionsDlgTitle, optionsDlgMessage)) {
@@ -106,9 +105,7 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
/*
* Close the Open Multi-User Case window, if it is open.
*/
if (multiUserCaseWindow != null) {
multiUserCaseWindow.setVisible(false);
}
OpenMultiUserCaseDialog.getInstance().setVisible(false);

/*
* Try to open the case associated with the case metadata file
@@ -160,9 +157,7 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
if (UserPreferences.getIsMultiUserModeEnabled()) {
WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));

if (multiUserCaseWindow == null) {
multiUserCaseWindow = OpenMultiUserCaseDialog.getInstance();
}
OpenMultiUserCaseDialog multiUserCaseWindow = OpenMultiUserCaseDialog.getInstance();
multiUserCaseWindow.setLocationRelativeTo(WindowManager.getDefault().getMainWindow());
multiUserCaseWindow.setVisible(true);
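The hunks above move the JFileChooser construction out of the constructor and into openCaseSelectionWindow(), so each invocation starts from a freshly configured chooser rather than reusing instance state. A minimal, self-contained sketch of that per-call setup (the class name and filter label here are illustrative, not taken from the diff):

    import java.io.File;
    import javax.swing.JFileChooser;
    import javax.swing.filechooser.FileNameExtensionFilter;

    final class CaseChooserExample {
        // Build a new chooser per call so stale state from a previous
        // dialog (selection, current directory) cannot leak into the next one.
        JFileChooser createCaseChooser(String baseCaseDir) {
            JFileChooser chooser = new JFileChooser();
            chooser.setDragEnabled(false);
            chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
            chooser.setMultiSelectionEnabled(false);
            chooser.setFileFilter(new FileNameExtensionFilter("Case metadata (*.aut)", "aut"));
            if (baseCaseDir != null) {
                chooser.setCurrentDirectory(new File(baseCaseDir));
            }
            return chooser;
        }
    }
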
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.casemodule;
import java.awt.Cursor;
import java.util.logging.Level;
import javax.swing.JComboBox;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
@@ -573,7 +572,7 @@ final class OptionalCasePropertiesPanel extends javax.swing.JPanel {
examinerTextField.getText(), tfExaminerPhoneText.getText(),
tfExaminerEmailText.getText(), taNotesText.getText()));
} catch (CaseActionException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error updating case details", ex);
}
}
}
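The change above is the pattern this PR applies throughout: development-time Exceptions.printStackTrace(ex) calls are replaced with logger calls that record a message, a level, and the stack trace, so failures land in the application log with context instead of only on the IDE console. A small sketch of the before/after shape (the surrounding class is invented for illustration):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    final class LoggingPatternExample {
        private static final Logger logger = Logger.getLogger(LoggingPatternExample.class.getName());

        void updateDetails(Runnable update) {
            try {
                update.run();
            } catch (RuntimeException ex) {
                // Before: Exceptions.printStackTrace(ex) printed to the console only.
                // After: log a descriptive message plus the exception at a suitable level.
                logger.log(Level.WARNING, "Error updating case details", ex);
            }
        }
    }
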
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -62,7 +62,6 @@ public class StartupWindowProvider implements StartupWindowInterface {
if (isRunningFromCommandLine()) {
// Autopsy is running from command line
logger.log(Level.INFO, "Running from command line"); //NON-NLS
System.out.println("Running from command line");
startupWindowToUse = new CommandLineStartupWindow();
// kick off command line processing
new CommandLineIngestManager().start();
@@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2018 Basis Technology Corp.
* Copyright 2015-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -26,7 +26,6 @@ import javax.swing.event.TableModelEvent;
import javax.swing.event.TableModelListener;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableModel;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.coreutils.Logger;
@@ -74,7 +73,7 @@ final class ManageCorrelationPropertiesDialog extends javax.swing.JDialog {
correlationTypes.clear();
correlationTypes.addAll(dbManager.getDefinedCorrelationTypes());
} catch (EamDbException ex) {
Exceptions.printStackTrace(ex);
LOGGER.log(Level.WARNING, "Error loading data", ex);
}

correlationTypes.forEach((aType) -> {
@@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2018 Basis Technology Corp.
* Copyright 2015-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -28,7 +28,6 @@ import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
@@ -78,7 +77,7 @@ public final class ManageOrganizationsDialog extends JDialog {
setButtonsEnabled(organizationList.getSelectedValue());
newOrg = null;
} catch (EamDbException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error getting Central Repo for Organizations dialog", ex);
}
display();
}
@@ -27,10 +27,11 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Observable;
import java.util.Observer;
import java.util.logging.Level;
import javax.swing.ComboBoxModel;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.autopsy.coreutils.Logger;

/**
* UI controls for Common Files Search scenario where the user intends to find
@@ -38,6 +39,7 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
*/
public final class InterCasePanel extends javax.swing.JPanel {

private final static Logger logger = Logger.getLogger(InterCasePanel.class.getName());
private static final long serialVersionUID = 1L;
private final Observable fileTypeFilterObservable;
static final int NO_CASE_SELECTED = -1;
@@ -121,7 +123,7 @@ public final class InterCasePanel extends javax.swing.JPanel {
this.correlationTypeComboBox.addItem(type.getDisplayName());
}
} catch (EamDbException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error getting correlation types", ex);
}
this.correlationTypeComboBox.setSelectedIndex(0);
}
@@ -47,7 +47,6 @@ import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Node.Property;
import org.openide.nodes.Node.PropertySet;
import org.openide.util.Exceptions;
import org.openide.util.Lookup;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.communications.ModifiableProxyLookup;
@@ -397,7 +396,7 @@ public class MessageViewer extends JPanel implements RelationshipsViewer {
try {
rootTablePane.getExplorerManager().setSelectedNodes(new Node[0]);
} catch (PropertyVetoException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error setting selected nodes", ex);
}
showThreadsPane();
}//GEN-LAST:event_backButtonActionPerformed
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-18 Basis Technology Corp.
* Copyright 2011-19 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -47,7 +47,6 @@ import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.FilterNode;
import org.openide.nodes.Node;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion;
@@ -197,7 +196,7 @@ class ThumbnailViewChildren extends Children.Keys<Integer> {
}

} catch (IllegalAccessException | InvocationTargetException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error getting value for thumbnail children", ex);
}
}
}
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012-2018 Basis Technology Corp.
* Copyright 2012-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -119,19 +119,19 @@ public class PlatformUtil {
public static String getUserPythonModulesPath() {
return getUserDirectory().getAbsolutePath() + File.separator + PYTHON_MODULES_SUBDIRECTORY;
}

/**
* Get root path where the user's OCR language packs are stored.
*
* @return Absolute path to the OCR language packs root directory.
*/
public static String getOcrLanguagePacksPath() {
return getUserDirectory().getAbsolutePath() + File.separator + OCR_LANGUAGE_SUBDIRECTORY;
}

/**
* Get the names of the language packs installed at the user directory.
*
* @return List of language packs base names
*/
public static List<String> getOcrLanguagePacks() {
@@ -139,20 +139,20 @@

List<String> languagePacks = new ArrayList<>();
for (File languagePack : languagePackRootDir.listFiles()) {
String fileExt = FilenameUtils.getExtension(languagePack.getName());
if (!languagePack.isDirectory() && OCR_LANGUAGE_PACK_EXT.equals(fileExt)) {
String packageName = FilenameUtils.getBaseName(languagePack.getName());
languagePacks.add(packageName);
}
}

return languagePacks;
}

/**
* Get root path where the user's object detection classifiers are stored.
*
* @return Absolute path to the object detection classifiers root directory.
*/
public static String getObjectDetectionClassifierPath() {
return getUserDirectory().getAbsolutePath() + File.separator + CLASSIFIERS_SUBDIRECTORY;
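The getOcrLanguagePacks() hunk above filters a directory listing down to language-pack base names by extension. A standalone sketch of the same idea using Commons IO (the directory handling, null check, and extension parameter are assumptions added for the example, not part of the diff):

    import java.io.File;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.commons.io.FilenameUtils;

    final class LanguagePackScanner {
        // Collect the base names of files with the given extension in a directory.
        static List<String> listPacks(File rootDir, String packExt) {
            List<String> packs = new ArrayList<>();
            File[] files = rootDir.listFiles();
            if (files == null) {      // rootDir missing or not a directory
                return packs;
            }
            for (File f : files) {
                if (!f.isDirectory() && packExt.equals(FilenameUtils.getExtension(f.getName()))) {
                    packs.add(FilenameUtils.getBaseName(f.getName()));
                }
            }
            return packs;
        }
    }
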
@@ -59,20 +59,21 @@ class GetSCOTask implements Runnable {
public void run() {
AbstractContentNode<?> contentNode = weakNodeRef.get();

//Check for stale reference
if (contentNode == null) {
//Check for stale reference or if columns are disabled
if (contentNode == null || UserPreferences.getHideSCOColumns()) {
return;
}

// get the SCO column values
List<Tag> tags = contentNode.getAllTagsFromDatabase();
CorrelationAttributeInstance fileAttribute = contentNode.getCorrelationAttributeInstance();

SCOData scoData = new SCOData();
scoData.setScoreAndDescription(contentNode.getScorePropertyAndDescription(tags));
//getting the correlation attribute and setting the comment column is done before the eamdb isEnabled check
//because the Comment column will reflect the presence of comments in the CR when the CR is enabled, but reflect tag comments regardless
CorrelationAttributeInstance fileAttribute = contentNode.getCorrelationAttributeInstance();
scoData.setComment(contentNode.getCommentProperty(tags, fileAttribute));

if (EamDb.isEnabled() && !UserPreferences.getHideSCOColumns()) {
if (EamDb.isEnabled()) {
Type type = null;
String value = null;
String description = Bundle.GetSCOTask_occurrences_defaultDescription();
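The GetSCOTask hunk hoists the UserPreferences.getHideSCOColumns() check into the stale-reference guard, so the task bails out before doing any tag or central-repository work when the columns are hidden. A generic sketch of that early-exit shape (the types below are simplified stand-ins, not the Autopsy classes):

    import java.lang.ref.WeakReference;
    import java.util.function.BooleanSupplier;

    final class GuardedTask implements Runnable {
        private final WeakReference<Object> nodeRef;
        private final BooleanSupplier columnsHidden; // stands in for the preference check

        GuardedTask(WeakReference<Object> nodeRef, BooleanSupplier columnsHidden) {
            this.nodeRef = nodeRef;
            this.columnsHidden = columnsHidden;
        }

        @Override
        public void run() {
            Object node = nodeRef.get();
            // Bail out before any expensive lookups if the node reference is
            // stale or the user has hidden the columns this task populates.
            if (node == null || columnsHidden.getAsBoolean()) {
                return;
            }
            // ... compute score, comment, and occurrence values here ...
        }
    }
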
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,9 +24,10 @@ import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;

@@ -40,7 +41,7 @@ public final class IngestProfiles {
private static final String PROFILE_DESC_KEY = "Profile_Description";
private static final String PROFILE_FILTER_KEY = "Profile_Filter";
private static final String PROFILE_FILE_EXT = ".properties";

private static final Logger logger = Logger.getLogger(IngestProfiles.class.getName());
/**
* Gets the collection of profiles which currently exist.
*
@@ -143,7 +144,7 @@ public final class IngestProfiles {
Files.deleteIfExists(Paths.get(PlatformUtil.getUserConfigDirectory(), selectedProfile.getName() + PROFILE_FILE_EXT));
FileUtils.deleteDirectory(IngestJobSettings.getSavedModuleSettingsFolder(selectedProfile.getName() + File.separator).toFile());
} catch (IOException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error deleting directory for profile " + selectedProfile.getName(), ex);
}
}
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,12 +22,14 @@ import org.sleuthkit.autopsy.ingest.runIngestModuleWizard.RunIngestModulesAction
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import javax.swing.JComponent;
import javax.swing.JMenuItem;
import org.openide.awt.DynamicMenuContent;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;

@@ -37,6 +39,8 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
final class RunIngestSubMenu extends JMenuItem implements DynamicMenuContent {

private static final Logger logger = Logger.getLogger(RunIngestSubMenu.class.getName());

/**
* Creates main menu/popup menu items. It's called each time a popup menu is
* constructed and just once for the main menu. Main menu updates happen
@@ -54,7 +58,7 @@ final class RunIngestSubMenu extends JMenuItem implements DynamicMenuContent {
// No open Cases, create a disabled empty menu
return getEmpty();
} catch (TskCoreException | NoCurrentCaseException e) {
System.out.println("Exception getting images: " + e.getMessage()); //NON-NLS
logger.log(Level.INFO, "Exception getting images: " + e.getMessage());
}
JComponent[] comps = new JComponent[dataSources.size()];
@@ -18,37 +18,51 @@
*/
package org.sleuthkit.autopsy.logicalimager.dsp;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import org.apache.commons.io.FileUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;

/**
* A runnable that - copy the logical image folder to a destination folder - add
* alert.txt and users.txt files to report - add an image data source to the
* SearchResults.txt and users.txt files to report - add an image data source to the
* case database.
*/
final class AddLogicalImageTask extends AddMultipleImageTask {

private final static Logger logger = Logger.getLogger(AddLogicalImageTask.class.getName());
private final static String ALERT_TXT = "alert.txt"; //NON-NLS
private final static Logger LOGGER = Logger.getLogger(AddLogicalImageTask.class.getName());
private final static String SEARCH_RESULTS_TXT = "SearchResults.txt"; //NON-NLS
private final static String USERS_TXT = "users.txt"; //NON-NLS
private final static String MODULE_NAME = "Logical Imager"; //NON-NLS
private final File src;
private final File dest;
private final DataSourceProcessorCallback callback;
private final DataSourceProcessorProgressMonitor progressMonitor;
private final Blackboard blackboard;
private final Case currentCase;

AddLogicalImageTask(String deviceId,
List<String> imagePaths,
@@ -62,10 +76,12 @@ final class AddLogicalImageTask extends AddMultipleImageTask {
this.dest = dest;
this.progressMonitor = progressMonitor;
this.callback = callback;
this.currentCase = Case.getCurrentCase();
this.blackboard = this.currentCase.getServices().getBlackboard();
}

/**
* Copy the src directory to dest. Add alert.txt and users.txt to the case
* Add SearchResults.txt and users.txt to the case
* report Adds the image to the case database.
*/
@Messages({
@@ -73,35 +89,34 @@ final class AddLogicalImageTask extends AddMultipleImageTask {
"AddLogicalImageTask.doneCopying=Done copying",
"# {0} - src", "# {1} - dest", "AddLogicalImageTask.failedToCopyDirectory=Failed to copy directory {0} to {1}",
"# {0} - file", "AddLogicalImageTask.addingToReport=Adding {0} to report",
"# {0} - file", "AddLogicalImageTask.doneAddingToReport=Done adding {0} to report"
"# {0} - file", "AddLogicalImageTask.doneAddingToReport=Done adding {0} to report",
"AddLogicalImageTask.addingInterestingFiles=Adding search results as interesting files",
"AddLogicalImageTask.doneAddingInterestingFiles=Done adding search results as interesting files",
"# {0} - SearchResults.txt", "# {1} - directory", "AddLogicalImageTask.cannotFindFiles=Cannot find {0} in {1}",
"# {0} - reason", "AddLogicalImageTask.failedToAddInterestingFiles=Failed to add interesting files: {0}"
})
@Override
public void run() {
List<String> errorList = new ArrayList<>();
List<Content> emptyDataSources = new ArrayList<>();

try {
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_copyingImageFromTo(src.toString(), dest.toString()));
FileUtils.copyDirectory(src, dest);
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneCopying());
} catch (IOException ex) {
// Copy directory failed
String msg = Bundle.AddLogicalImageTask_failedToCopyDirectory(src.toString(), dest.toString());
errorList.add(msg);
logger.log(Level.SEVERE, String.format("Failed to copy directory %s to %s", src.toString(), dest.toString()), ex);
// Add the SearchResults.txt and users.txt to the case report
String resultsFilename;
if (Paths.get(dest.toString(), SEARCH_RESULTS_TXT).toFile().exists()) {
resultsFilename = SEARCH_RESULTS_TXT;
} else {
errorList.add(Bundle.AddLogicalImageTask_cannotFindFiles(SEARCH_RESULTS_TXT, dest.toString()));
callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
return;
}

// Add the alert.txt and users.txt to the case report
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(ALERT_TXT));
String status = addReport(Paths.get(dest.toString(), ALERT_TXT), ALERT_TXT + " " + src.getName());
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(resultsFilename));
String status = addReport(Paths.get(dest.toString(), resultsFilename), resultsFilename + " " + src.getName());
if (status != null) {
errorList.add(status);
callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
return;
}
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(ALERT_TXT));
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(resultsFilename));

progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(USERS_TXT));
status = addReport(Paths.get(dest.toString(), USERS_TXT), USERS_TXT + " " + src.getName());
@@ -113,6 +128,21 @@ final class AddLogicalImageTask extends AddMultipleImageTask {
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(USERS_TXT));

super.run();
if (super.getResult() == DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS) {
callback.done(super.getResult(), super.getErrorMessages(), super.getNewDataSources());
return;
}

try {
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingInterestingFiles());
addInterestingFiles(dest, Paths.get(dest.toString(), resultsFilename));
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingInterestingFiles());
callback.done(super.getResult(), super.getErrorMessages(), super.getNewDataSources());
} catch (IOException | TskCoreException ex) {
errorList.add(Bundle.AddLogicalImageTask_failedToAddInterestingFiles(ex.getMessage()));
LOGGER.log(Level.SEVERE, "Failed to add interesting files", ex); // NON-NLS
callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.NONCRITICAL_ERRORS, errorList, emptyDataSources);
}
}

/**
@@ -136,8 +166,87 @@ final class AddLogicalImageTask extends AddMultipleImageTask {
return null;
} catch (TskCoreException ex) {
String msg = Bundle.AddLogicalImageTask_failedToAddReport(reportPath.toString(), ex.getMessage());
logger.log(Level.SEVERE, String.format("Failed to add report %s. Reason= %s", reportPath.toString(), ex.getMessage()), ex);
LOGGER.log(Level.SEVERE, String.format("Failed to add report %s. Reason= %s", reportPath.toString(), ex.getMessage()), ex); // NON-NLS
return msg;
}
}

private Map<String, Long> imagePathsToDataSourceObjId(Map<Long, List<String>> imagePaths) {
Map<String, Long> imagePathToObjIdMap = new HashMap<>();
for (Map.Entry<Long, List<String>> entry : imagePaths.entrySet()) {
Long key = entry.getKey();
List<String> names = entry.getValue();
for (String name : names) {
imagePathToObjIdMap.put(name, key);
}
}
return imagePathToObjIdMap;
}

@Messages({
"# {0} - line number", "# {1} - fields length", "# {2} - expected length", "AddLogicalImageTask.notEnoughFields=File does not contain enough fields at line {0}, got {1}, expecting {2}",
"# {0} - target image path", "AddLogicalImageTask.cannotFindDataSourceObjId=Cannot find obj_id in tsk_image_names for {0}"
})
private void addInterestingFiles(File src, Path resultsPath) throws IOException, TskCoreException {
Map<Long, List<String>> imagePaths = currentCase.getSleuthkitCase().getImagePaths();
Map<String, Long> imagePathToObjIdMap = imagePathsToDataSourceObjId(imagePaths);

try (BufferedReader br = new BufferedReader(new InputStreamReader(
new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS
String line;
br.readLine(); // skip the header line
int lineNumber = 2;
while ((line = br.readLine()) != null) {
String[] fields = line.split("\t", -1); // NON-NLS
if (fields.length != 9) {
throw new IOException(Bundle.AddLogicalImageTask_notEnoughFields(lineNumber, fields.length, 9));
}
String vhdFilename = fields[0];

String targetImagePath = Paths.get(src.toString(), vhdFilename).toString();
Long dataSourceObjId = imagePathToObjIdMap.get(targetImagePath);
if (dataSourceObjId == null) {
throw new TskCoreException(Bundle.AddLogicalImageTask_cannotFindDataSourceObjId(targetImagePath));
}

// String fileSystemOffsetStr = fields[1];
String fileMetaAddressStr = fields[2];
// String extractStatusStr = fields[3];
String ruleSetName = fields[4];
String ruleName = fields[5];
// String description = fields[6];
String filename = fields[7];
// String parentPath = fields[8];

String query = String.format("data_source_obj_id = '%s' AND meta_addr = '%s' AND name = '%s'", // NON-NLS
dataSourceObjId.toString(), fileMetaAddressStr, filename);
List<AbstractFile> matchedFiles = Case.getCurrentCase().getSleuthkitCase().findAllFilesWhere(query);
for (AbstractFile file : matchedFiles) {
addInterestingFile(file, ruleSetName, ruleName);
}
lineNumber++;
}
}
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME,
BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
}

private void addInterestingFile(AbstractFile file, String ruleSetName, String ruleName) throws TskCoreException {
Collection<BlackboardAttribute> attributes = new ArrayList<>();
BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName);
attributes.add(setNameAttribute);
BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName);
attributes.add(ruleNameAttribute);
org.sleuthkit.datamodel.Blackboard tskBlackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(artifact);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
}
}
}
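addInterestingFiles() above walks a tab-separated SearchResults.txt, expecting a header line followed by nine-field rows. A minimal parser sketch of the same format (the row layout mirrors the diff; the class name and error handling are illustrative):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    final class SearchResultsParser {
        // Parse one row per line; split with limit -1 so trailing empty
        // fields are kept and the field count stays verifiable.
        static void parse(Path results) throws IOException {
            try (BufferedReader br = Files.newBufferedReader(results, StandardCharsets.UTF_8)) {
                br.readLine();        // skip the header row
                int lineNumber = 2;   // first data row is line 2 of the file
                String line;
                while ((line = br.readLine()) != null) {
                    String[] fields = line.split("\t", -1);
                    if (fields.length != 9) {
                        throw new IOException("Line " + lineNumber + ": expected 9 fields, got " + fields.length);
                    }
                    String vhdFilename = fields[0];
                    String fileMetaAddress = fields[2];
                    String ruleSetName = fields[4];
                    String ruleName = fields[5];
                    String filename = fields[7];
                    // ... map vhdFilename to a data source and flag the matched file ...
                    lineNumber++;
                }
            }
        }
    }
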
@@ -59,6 +59,10 @@ class AddMultipleImageTask implements Runnable {

private boolean criticalErrorOccurred;
private volatile boolean cancelled;

private List<Content> newDataSources;
private List<String> errorMessages;
private DataSourceProcessorResult result;

/**
* Constructs a runnable that adds multiple image files to a case database.
@@ -90,16 +94,19 @@ class AddMultipleImageTask implements Runnable {
this.callback = callback;
this.progressMonitor = progressMonitor;
currentCase = Case.getCurrentCaseThrows();
this.criticalErrorOccurred = false;
this.result = DataSourceProcessorResult.NO_ERRORS;
}

@Override
public void run() {
newDataSources = new ArrayList<>();
errorMessages = new ArrayList<>();

/*
* Try to add the input image files as images.
*/
List<Content> newDataSources = new ArrayList<>();
List<String> corruptedImageFilePaths = new ArrayList<>();
List<String> errorMessages = new ArrayList<>();
currentCase.getSleuthkitCase().acquireSingleUserCaseWriteLock();
try {
progressMonitor.setIndeterminate(true);
@@ -163,7 +170,6 @@ class AddMultipleImageTask implements Runnable {
/*
* Pass the results back via the callback.
*/
DataSourceProcessorResult result;
if (criticalErrorOccurred) {
result = DataSourceProcessorResult.CRITICAL_ERRORS;
} else if (!errorMessages.isEmpty()) {
@@ -171,8 +177,6 @@ class AddMultipleImageTask implements Runnable {
} else {
result = DataSourceProcessorResult.NO_ERRORS;
}
callback.done(result, errorMessages, newDataSources);
criticalErrorOccurred = false;
}

/**
@@ -270,4 +274,16 @@ class AddMultipleImageTask implements Runnable {
}
}

public List<Content> getNewDataSources() {
return newDataSources;
}

public List<String> getErrorMessages() {
return errorMessages;
}

public DataSourceProcessorResult getResult() {
return result;
}

}
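AddMultipleImageTask now stores its outcome in fields and exposes getResult(), getErrorMessages(), and getNewDataSources(), which is what lets the AddLogicalImageTask subclass above call super.run(), inspect the outcome, and continue only on success. A stripped-down sketch of that base/derived hand-off (types simplified to plain Java):

    import java.util.ArrayList;
    import java.util.List;

    class BaseTask implements Runnable {
        private final List<String> errors = new ArrayList<>();
        private boolean criticalError;

        @Override
        public void run() {
            // ... do the work, populate errors / criticalError ...
        }

        List<String> getErrorMessages() { return errors; }
        boolean hadCriticalError() { return criticalError; }
    }

    class DerivedTask extends BaseTask {
        @Override
        public void run() {
            super.run();
            if (hadCriticalError()) {
                return; // surface the base outcome and stop
            }
            // ... follow-up work that only makes sense on success ...
        }
    }
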
@@ -2,20 +2,31 @@
# To change this template file, choose Tools | Templates
# and open the template in the editor.

AddLogicalImageTask.addingInterestingFiles=Adding search results as interesting files
# {0} - file
AddLogicalImageTask.addingToReport=Adding {0} to report
# {0} - SearchResults.txt
# {1} - directory
AddLogicalImageTask.cannotFindFiles=Cannot find {0} in {1}
# {0} - src
# {1} - dest
AddLogicalImageTask.copyingImageFromTo=Copying image from {0} to {1}
AddLogicalImageTask.doneAddingInterestingFiles=Done adding search results as interesting files
# {0} - file
AddLogicalImageTask.doneAddingToReport=Done adding {0} to report
AddLogicalImageTask.doneCopying=Done copying
# {0} - reason
AddLogicalImageTask.failedToAddInterestingFiles=Failed to add interesting files: {0}
# {0} - file
# {1} - exception message
AddLogicalImageTask.failedToAddReport=Failed to add report {0}. Reason= {1}
# {0} - src
# {1} - dest
AddLogicalImageTask.failedToCopyDirectory=Failed to copy directory {0} to {1}
# {0} - line number
# {1} - fields length
# {2} - expected length
AddLogicalImageTask.notEnoughFields=File does not contain enough fields at line {0}, got {1}, expecting {2}
# {0} - imageFilePath
AddMultipleImageTask.adding=Adding: {0}
# {0} - file
@@ -27,6 +27,7 @@ import java.util.Calendar;
import java.util.List;
import java.util.UUID;
import javax.swing.JPanel;
import org.apache.commons.io.FileUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.util.lookup.ServiceProvider;
import org.openide.util.lookup.ServiceProviders;
@@ -170,9 +171,21 @@ public final class LogicalImagerDSProcessor implements DataSourceProcessor {
}
File src = imageDirPath.toFile();

try {
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_copyingImageFromTo(src.toString(), dest.toString()));
FileUtils.copyDirectory(src, dest);
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneCopying());
} catch (IOException ex) {
// Copy directory failed
String msg = Bundle.AddLogicalImageTask_failedToCopyDirectory(src.toString(), dest.toString());
errorList.add(msg);
callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
return;
}

// Get all VHD files in the src directory
List<String> imagePaths = new ArrayList<>();
for (File f : src.listFiles()) {
for (File f : dest.listFiles()) {
if (f.getName().endsWith(".vhd")) {
try {
imagePaths.add(f.getCanonicalPath());
@@ -194,7 +207,6 @@ public final class LogicalImagerDSProcessor implements DataSourceProcessor {
String msg = Bundle.LogicalImagerDSProcessor_noCurrentCase();
errorList.add(msg);
callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
return;
}
}
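Note that the loop above now lists dest rather than src: after FileUtils.copyDirectory() the VHD images must be referenced from their copied location, not the original folder. A small sketch of the scan (assumes a flat directory of .vhd files; the class name is illustrative):

    import java.io.File;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    final class VhdScanner {
        // List canonical paths of .vhd files in the copied-to directory.
        static List<String> findVhdImages(File dest) throws IOException {
            List<String> imagePaths = new ArrayList<>();
            File[] files = dest.listFiles();
            if (files != null) {   // null when dest is missing or not a directory
                for (File f : files) {
                    if (f.getName().endsWith(".vhd")) {
                        imagePaths.add(f.getCanonicalPath());
                    }
                }
            }
            return imagePaths;
        }
    }
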
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014-2018 Basis Technology Corp.
* Copyright 2014-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -216,7 +216,6 @@ final class ContactAnalyzer {
try {
while ((length = is.read(buffer)) != -1) {
os.write(buffer, 0, length);
System.out.println(length);
os.flush();

}
@@ -239,13 +238,13 @@ final class ContactAnalyzer {
ostream.write(c);
}
} catch (IOException e) {
System.out.println("Error: " + e.getMessage()); //NON-NLS
logger.log(Level.WARNING, "Error copying file", e);
} finally {
try {
istream.close();
ostream.close();
} catch (IOException e) {
System.out.println("File did not close"); //NON-NLS
logger.log(Level.WARNING, "File did not close", e);
}
}
}
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2018 Basis Technology Corp.
*
* Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -70,7 +70,7 @@ class StixArtifactData {

@Messages({"StixArtifactData.indexError.message=Failed to index STIX interesting file hit artifact for keyword search.",
"StixArtifactData.noOpenCase.errMsg=No open case available."})
public void createArtifact(String a_title) throws TskCoreException {
void createArtifact(String a_title) throws TskCoreException {
Blackboard blackboard;
try {
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
@@ -104,8 +104,4 @@ class StixArtifactData {
}
}
}

public void print() {
System.out.println(" " + observableId + " " + file.getName());
}
}
@@ -42,6 +42,7 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.coreutils.FileTypeUtils.FileTypeCategory;
import org.sleuthkit.autopsy.report.caseuco.CaseUcoFormatExporter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
@@ -355,6 +356,19 @@ class PortableCaseReportModule implements ReportModule {
return;
}

File reportsFolder = Paths.get(caseFolder.toString(), "Reports").toFile();
if(!reportsFolder.mkdir()) {
handleError("Could not make report folder", "Could not make report folder", null, progressPanel); // NON-NLS
return;
}

try {
CaseUcoFormatExporter.export(tagNames, setNames, reportsFolder, progressPanel);
} catch (IOException | SQLException | NoCurrentCaseException | TskCoreException ex) {
handleError("Problem while generating CASE-UCO report",
"Problem while generating CASE-UCO report", ex, progressPanel); // NON-NLS
}

// Compress the case (if desired)
if (options.shouldCompress()) {
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_compressingCase());
@@ -1,3 +1,6 @@
CaseUcoFormatExporter.datasourceMsg=Generating CASE-UCO Report for %s
CaseUcoFormatExporter.finishMsg=Finished generating CASE-UCO Report
CaseUcoFormatExporter.startMsg=Generating CASE-UCO Report
OpenIDE-Module-Name=CaseUcoModule
ReportCaseUco.getName.text=CASE-UCO
ReportCaseUco.getDesc.text=CASE-UCO format report with basic property fields for every file.
@@ -23,24 +23,39 @@ import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultIndenter;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.google.common.collect.Lists;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
import java.util.logging.Level;
import org.apache.commons.io.FileUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.report.ReportProgressPanel;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TagName;

/**
* Generates CASE-UCO report file for a data source
@@ -49,6 +64,11 @@ public final class CaseUcoFormatExporter {

private static final Logger logger = Logger.getLogger(CaseUcoFormatExporter.class.getName());

private static final BlackboardAttribute.Type SET_NAME = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME);
private static final BlackboardArtifact.ARTIFACT_TYPE INTERESTING_FILE_HIT = BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
private static final BlackboardArtifact.ARTIFACT_TYPE INTERESTING_ARTIFACT_HIT = BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
private static final String TEMP_DIR_NAME = "case_uco_tmp";

private CaseUcoFormatExporter() {
}

@@ -177,6 +197,136 @@ public final class CaseUcoFormatExporter {
}
}

/**
* Exports files that are tagged with the following TagNames and that belong to
* the following interesting file sets (set name attributes of TSK_INTERESTING_FILE_HIT
* and TSK_INTERESTING_ARTIFACT_HIT). Artifacts that are tagged with
* the following TagNames also have their associated source files included.
*
* Duplicate files are excluded.
*
* @param tagTypes Collection of TagNames to match
* @param interestingItemSets Collection of SET_NAMEs to match on in TSK_INTERESTING_FILE_HITs
* and TSK_INTERESTING_ARTIFACT_HITs.
* @param caseReportFolder Path to the folder that the CASE-UCO report should be written into
* @param progressPanel UI Component to be updated with current processing status
*/
@NbBundle.Messages({
"CaseUcoFormatExporter.startMsg=Generating CASE-UCO Report",
"CaseUcoFormatExporter.datasourceMsg=Generating CASE-UCO Report for %s",
"CaseUcoFormatExporter.finishMsg=Finished generating CASE-UCO Report"
})
public static void export(List<TagName> tagTypes, List<String> interestingItemSets,
File caseReportFolder, ReportProgressPanel progressPanel) throws IOException, SQLException,
NoCurrentCaseException, TskCoreException {

progressPanel.updateStatusLabel(Bundle.CaseUcoFormatExporter_startMsg());
//Acquire references for file discovery
Case currentCase = Case.getCurrentCaseThrows();
String caseTempDirectory = currentCase.getTempDirectory();
SleuthkitCase skCase = currentCase.getSleuthkitCase();
TagsManager tagsManager = currentCase.getServices().getTagsManager();

//Create temp directory to filter out duplicate files.
Path tmpDir = Paths.get(caseTempDirectory, TEMP_DIR_NAME);
FileUtils.deleteDirectory(tmpDir.toFile());
Files.createDirectory(tmpDir);

//Create our report file
Path reportFile = Paths.get(caseReportFolder.toString(),
ReportCaseUco.getReportFileName());

//Timezone for formatting file creation, modification, and accessed times
SimpleTimeZone timeZone = new SimpleTimeZone(0, "GMT");

try (JsonGenerator jsonGenerator = createJsonGenerator(reportFile.toFile())) {
initializeJsonOutputFile(jsonGenerator);
//Make the case the first entity in the report file.
String caseTraceId = saveCaseInfo(skCase, jsonGenerator);

for (DataSource ds : skCase.getDataSources()) {
progressPanel.updateStatusLabel(String.format(
Bundle.CaseUcoFormatExporter_datasourceMsg(), ds.getName()));
String dataSourceTraceId = saveDataSourceInfo(ds.getId(),
caseTraceId, skCase, jsonGenerator);
for (TagName tn : tagTypes) {
for (ContentTag ct : tagsManager.getContentTagsByTagName(tn, ds.getId())) {
saveUniqueFilesToCaseUcoFormat(ct.getContent(), tmpDir,
jsonGenerator, timeZone, dataSourceTraceId);
}
for (BlackboardArtifactTag bat : tagsManager.getBlackboardArtifactTagsByTagName(tn, ds.getId())) {
saveUniqueFilesToCaseUcoFormat(bat.getContent(), tmpDir,
jsonGenerator, timeZone, dataSourceTraceId);
}
}
if(!interestingItemSets.isEmpty()) {
List<BlackboardArtifact.ARTIFACT_TYPE> typesToQuery = Lists.newArrayList(
INTERESTING_FILE_HIT, INTERESTING_ARTIFACT_HIT);
for(BlackboardArtifact.ARTIFACT_TYPE artType : typesToQuery) {
for(BlackboardArtifact bArt : skCase.getBlackboardArtifacts(artType)) {
if(bArt.getDataSource().getId() != ds.getId()) {
continue;
}
BlackboardAttribute setAttr = bArt.getAttribute(SET_NAME);
if (interestingItemSets.contains(setAttr.getValueString())) {
Content content = skCase.getContentById(bArt.getObjectID());
saveUniqueFilesToCaseUcoFormat(content, tmpDir,
jsonGenerator, timeZone, dataSourceTraceId);
}
}
}
}
}
finilizeJsonOutputFile(jsonGenerator);
progressPanel.updateStatusLabel(Bundle.CaseUcoFormatExporter_finishMsg());
}
}

/**
* Saves only unique abstract files to the report. Uniqueness is
* determined by object id. The tmpDir Path is used to store object
* ids that have already been visited.
*
* @param content AbstractFile instance
* @param tmpDir Directory to write object ids
* @param jsonGenerator Report generator
* @param timeZone Time zone for ctime, atime, and mtime formatting
* @param dataSourceTraceId TraceID number for the parent data source
* @throws IOException
*/
private static void saveUniqueFilesToCaseUcoFormat(Content content, Path tmpDir, JsonGenerator jsonGenerator,
TimeZone timeZone, String dataSourceTraceId) throws IOException {
if (content instanceof AbstractFile && !(content instanceof DataSource)) {
AbstractFile absFile = (AbstractFile) content;
Path filePath = tmpDir.resolve(Long.toString(absFile.getId()));
if (!Files.exists(filePath) && !absFile.isDir()) {
saveFileInCaseUcoFormat(
absFile.getId(),
absFile.getName(),
absFile.getParentPath(),
absFile.getMd5Hash(),
absFile.getMIMEType(),
absFile.getSize(),
ContentUtils.getStringTimeISO8601(absFile.getCtime(), timeZone),
ContentUtils.getStringTimeISO8601(absFile.getAtime(), timeZone),
ContentUtils.getStringTimeISO8601(absFile.getMtime(), timeZone),
absFile.getNameExtension(),
jsonGenerator,
dataSourceTraceId
);
filePath.toFile().createNewFile();
}
}
}

private static JsonGenerator createJsonGenerator(File reportFile) throws IOException {
JsonFactory jsonGeneratorFactory = new JsonFactory();
JsonGenerator jsonGenerator = jsonGeneratorFactory.createGenerator(reportFile, JsonEncoding.UTF8);
// insert \n after each field for more readable formatting
jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter().withObjectIndenter(new DefaultIndenter(" ", "\n")));
return jsonGenerator;
}

private static void initializeJsonOutputFile(JsonGenerator catalog) throws IOException {
catalog.writeStartObject();
catalog.writeFieldName("@graph");
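The exporter's saveUniqueFilesToCaseUcoFormat() above dedupes by touching a marker file named after each object id under a scratch directory; seeing the marker again means the file was already written. A compact sketch of that bookkeeping (class and method names are illustrative, not from the diff):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    final class SeenTracker {
        private final Path tmpDir;

        SeenTracker(Path tmpDir) {
            this.tmpDir = tmpDir;
        }

        // Returns true the first time an id is seen, false afterwards.
        // The marker files double as a record that persists across calls.
        boolean firstSighting(long objectId) throws IOException {
            Path marker = tmpDir.resolve(Long.toString(objectId));
            if (Files.exists(marker)) {
                return false;
            }
            Files.createFile(marker);
            return true;
        }
    }
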
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,8 +21,6 @@ package org.sleuthkit.autopsy.test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.logging.Level;

import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@@ -128,7 +126,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
logger.log(Level.SEVERE, String.format("Failed to process file (obj_id = %d)", file.getId()), ex);
return ProcessResult.ERROR;
} catch (Blackboard.BlackboardException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Blackboard Exception processing file with obj_id = " + file.getId(), ex);
}
return ProcessResult.OK;
}
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -454,7 +454,6 @@ final class StringsTextExtractor implements TextExtractor {
convertBuffRemain = bytesInConvertBuff - convertBuffOffset;
}
} catch (TskCoreException ex) {
//Exceptions.printStackTrace(ex);
fileEOF = true;
}
}
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -67,36 +67,36 @@ public class CommonAttributeSearchInterCaseTest extends NbTestCase {
this.utils = new InterCaseTestUtils(this);
}

@Override
public void setUp() {
this.utils.clearTestDir();
try {
this.utils.enableCentralRepo();

String[] cases = new String[]{
CASE1,
CASE2,
CASE3,
CASE4};

Path[][] paths = {
{this.utils.attrCase1Path},
{this.utils.attrCase2Path},
{this.utils.attrCase3Path},
{this.utils.attrCase4Path}};

this.utils.createCases(cases, paths, this.utils.getIngestSettingsForKitchenSink(), InterCaseTestUtils.CASE1);
} catch (TskCoreException | EamDbException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
}

@Override
public void tearDown() {
this.utils.clearTestDir();
this.utils.tearDown();
}
// @Override
// public void setUp() {
// this.utils.clearTestDir();
// try {
// this.utils.enableCentralRepo();
//
// String[] cases = new String[]{
// CASE1,
// CASE2,
// CASE3,
// CASE4};
//
// Path[][] paths = {
// {this.utils.attrCase1Path},
// {this.utils.attrCase2Path},
// {this.utils.attrCase3Path},
// {this.utils.attrCase4Path}};
//
// this.utils.createCases(cases, paths, this.utils.getIngestSettingsForKitchenSink(), InterCaseTestUtils.CASE1);
// } catch (TskCoreException | EamDbException ex) {
// Exceptions.printStackTrace(ex);
// Assert.fail(ex.getMessage());
// }
// }
//
// @Override
// public void tearDown() {
// this.utils.clearTestDir();
// this.utils.tearDown();
// }

/**
* Run a search on the given type and ensure that all results are of that
@@ -106,25 +106,25 @@ public class CommonAttributeSearchInterCaseTest extends NbTestCase {
*
* @param type
*/
private void assertResultsAreOfType(CorrelationAttributeInstance.Type type) {

try {

AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, type, 0);

CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();

metadata.size();

assertFalse(verifyInstanceCount(metadata, 0));

assertTrue(this.utils.areAllResultsOfType(metadata, type));

} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
}
// private void assertResultsAreOfType(CorrelationAttributeInstance.Type type) {
//
// try {
//
// AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, type, 0);
//
// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
//
// metadata.size();
//
// assertFalse(verifyInstanceCount(metadata, 0));
//
// assertTrue(this.utils.areAllResultsOfType(metadata, type));
//
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
// Exceptions.printStackTrace(ex);
// Assert.fail(ex.getMessage());
// }
// }

/**
* Test that a search for each type returns results of that type only.
@@ -142,29 +142,29 @@ public class CommonAttributeSearchInterCaseTest extends NbTestCase {
* than the file type.
*/
public void testTwo() {
try {

AbstractCommonAttributeSearcher builder;
CommonAttributeCountSearchResults metadata;

builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 100);
metadata = builder.findMatchesByCount();
metadata.size();
//assertTrue("This should yield 13 results.", verifyInstanceCount(metadata, 13));

builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 20);
metadata = builder.findMatchesByCount();
metadata.size();
//assertTrue("This should yield no results.", verifyInstanceCount(metadata, 0));

builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 90);
metadata = builder.findMatchesByCount();
metadata.size();
//assertTrue("This should yield 2 results.", verifyInstanceCount(metadata, 2));

} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
// try {
//
// AbstractCommonAttributeSearcher builder;
// CommonAttributeCountSearchResults metadata;
//
// builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 100);
// metadata = builder.findMatchesByCount();
// metadata.size();
// //assertTrue("This should yield 13 results.", verifyInstanceCount(metadata, 13));
//
// builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 20);
// metadata = builder.findMatchesByCount();
// metadata.size();
// //assertTrue("This should yield no results.", verifyInstanceCount(metadata, 0));
//
// builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.USB_ID_TYPE, 90);
// metadata = builder.findMatchesByCount();
// metadata.size();
// //assertTrue("This should yield 2 results.", verifyInstanceCount(metadata, 2));
//
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
// Exceptions.printStackTrace(ex);
// Assert.fail(ex.getMessage());
// }
}
}
|
||||
|
@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -63,139 +63,139 @@ public class IngestedWithHashAndFileTypeInterCaseTest extends NbTestCase {
this.utils = new InterCaseTestUtils(this);
}

@Override
public void setUp() {
this.utils.clearTestDir();
try {
this.utils.enableCentralRepo();

String[] cases = new String[]{
CASE1,
CASE2,
CASE3};

Path[][] paths = {
{this.utils.case1DataSet1Path, this.utils.case1DataSet2Path},
{this.utils.case2DataSet1Path, this.utils.case2DataSet2Path},
{this.utils.case3DataSet1Path, this.utils.case3DataSet2Path}};

this.utils.createCases(cases, paths, this.utils.getIngestSettingsForHashAndFileType(), InterCaseTestUtils.CASE3);
} catch (TskCoreException | EamDbException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
}

@Override
public void tearDown() {
this.utils.clearTestDir();
this.utils.tearDown();
}
// @Override
// public void setUp() {
// this.utils.clearTestDir();
// try {
// this.utils.enableCentralRepo();
//
// String[] cases = new String[]{
// CASE1,
// CASE2,
// CASE3};
//
// Path[][] paths = {
// {this.utils.case1DataSet1Path, this.utils.case1DataSet2Path},
// {this.utils.case2DataSet1Path, this.utils.case2DataSet2Path},
// {this.utils.case3DataSet1Path, this.utils.case3DataSet2Path}};
//
// this.utils.createCases(cases, paths, this.utils.getIngestSettingsForHashAndFileType(), InterCaseTestUtils.CASE3);
// } catch (TskCoreException | EamDbException ex) {
// Exceptions.printStackTrace(ex);
// Assert.fail(ex.getMessage());
// }
// }
//
// @Override
// public void tearDown() {
// this.utils.clearTestDir();
// this.utils.tearDown();
// }

/**
* Search All cases with no file type filtering.
*/
public void testOne() {
try {
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0);
CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();

assertTrue("Results should not be empty", metadata.size() != 0);

//case 1 data set 1
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));

//case 1 data set 2
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));

//case 2 data set 1
assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));

//case 2 data set 2
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));

//case 3 data set 1
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));

//case 3 data set 2
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));

} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
// try {
// AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0);
// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
//
// assertTrue("Results should not be empty", metadata.size() != 0);
//
// //case 1 data set 1
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
//
// //case 1 data set 2
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
//
// //case 2 data set 1
// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
//
// //case 2 data set 2
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
//
// //case 3 data set 1
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
//
// //case 3 data set 2
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
//
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
// Exceptions.printStackTrace(ex);
// Assert.fail(ex.getMessage());
// }
}

/**
* Search All cases with no file type filtering.
*/
public void testTwo() {
try {
int matchesMustAlsoBeFoundInThisCase = 0;

// Filter out the time stamp to get the correct case name.
Set<String> caseNames = this.utils.getCaseMap().keySet();
for (String caseName : caseNames) {
if (caseName.substring(0, caseName.length() - 20).equalsIgnoreCase(CASE2)) {
// Case match found. Get the number of matches.
matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(caseName);
}
}
CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0);

CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();

assertTrue("Results should not be empty", metadata.size() != 0);

//case 1 data set 1
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));

//case 1 data set 2
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));

//case 2 data set 1
assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));

//case 2 data set 2
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));

//case 3 data set 1
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));

//case 3 data set 2
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));

} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
// try {
// int matchesMustAlsoBeFoundInThisCase = 0;
//
// // Filter out the time stamp to get the correct case name.
// Set<String> caseNames = this.utils.getCaseMap().keySet();
// for (String caseName : caseNames) {
// if (caseName.substring(0, caseName.length() - 20).equalsIgnoreCase(CASE2)) {
// // Case match found. Get the number of matches.
// matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(caseName);
// }
// }
// CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
// AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0);
//
// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
//
// assertTrue("Results should not be empty", metadata.size() != 0);
//
// //case 1 data set 1
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
//
// //case 1 data set 2
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
//
// //case 2 data set 1
// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
//
// //case 2 data set 2
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
//
// //case 3 data set 1
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
//
// //case 3 data set 2
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
//
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
// Exceptions.printStackTrace(ex);
// Assert.fail(ex.getMessage());
// }
}

/**
@ -203,49 +203,49 @@ public class IngestedWithHashAndFileTypeInterCaseTest extends NbTestCase {
* the result set since they exist too frequently
*/
public void testThree() {
try {

CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50);

CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
metadata.filterMetadata();
assertTrue("Results should not be empty", metadata.size() != 0);

//case 1 data set 1
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));

//case 1 data set 2
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));

//case 2 data set 1
assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));

//case 2 data set 2
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));

//case 3 data set 1
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));

//case 3 data set 2
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));

} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
// try {
//
// CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
// AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50);
//
// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
// metadata.filterMetadata();
// assertTrue("Results should not be empty", metadata.size() != 0);
//
// //case 1 data set 1
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
//
// //case 1 data set 2
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
//
// //case 2 data set 1
// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
//
// //case 2 data set 2
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
//
// //case 3 data set 1
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
//
// //case 3 data set 2
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
//
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
// Exceptions.printStackTrace(ex);
// Assert.fail(ex.getMessage());
// }
}
}

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -46,19 +46,11 @@ import org.sleuthkit.autopsy.testutils.CaseUtils;
import org.sleuthkit.autopsy.testutils.IngestUtils;
import org.sleuthkit.datamodel.TskCoreException;
import junit.framework.Assert;
import org.sleuthkit.autopsy.casemodule.CaseActionException;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.commonpropertiessearch.AbstractCommonAttributeInstance;
import org.sleuthkit.autopsy.commonpropertiessearch.CaseDBCommonAttributeInstanceNode;
import org.sleuthkit.autopsy.commonpropertiessearch.CentralRepoCommonAttributeInstance;
import org.sleuthkit.autopsy.commonpropertiessearch.CentralRepoCommonAttributeInstanceNode;
import org.sleuthkit.autopsy.commonpropertiessearch.CommonAttributeCountSearchResults;
import org.sleuthkit.autopsy.datamodel.utils.DataSourceLoader;
import org.sleuthkit.autopsy.commonpropertiessearch.CommonAttributeValue;
import org.sleuthkit.autopsy.commonpropertiessearch.CommonAttributeValueList;
import org.sleuthkit.autopsy.coreutils.TimeStampUtils;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.modules.dataSourceIntegrity.DataSourceIntegrityModuleFactory;
@ -279,10 +271,8 @@ class InterCaseTestUtils {
for (CorrelationCase correlationCase : EamDb.getInstance().getCases()) {
mapOfCaseIdsToCase.put(correlationCase.getDisplayName(), correlationCase.getID());
}
System.out.println("EAM IS ENABLED");
return mapOfCaseIdsToCase;
} else {
System.out.println("EAMDB NOT ENABLED");
//it is reasonable that this might happen...
// for example when we test the feature in the absence of an enabled eamdb
return new HashMap<>(0);

@ -85,8 +85,7 @@ public class BingTranslatorTest {
// /*
// //It's unrealistic to expect the same answer every time, but sometimes
// //it's helpful to have this in your debug process.
// System.out.println(translation);
// assertEquals(expectedTranslation, translation);
// assertEquals("Result did not match expected result", expectedTranslation, translation);
// */
// }
}

@ -37,8 +37,8 @@ public class GoogleTranslatorTest {
// //It's unrealistic to expect the same answer every time, but sometimes
// //it's helpful to have this in your debug process.
//
// String expResult = "translate"; assertEquals(expResult, result);
// System.out.println(result);
// String expResult = "translate"; assertEquals(expResult, result);
// assertEquals("Result did not match expected result" expResult, result);

}
//Commented out because using TranslateOption with the current version of Guava is not supported JIRA-5063
@ -63,7 +63,6 @@ public class GoogleTranslatorTest {
// //It's unrealistic to expect the same answer every time, but sometimes
// //it's helpful to have this in your debug process.
// String expResult = "¡Hola Mundo!";
// assertEquals(expResult, result);
// System.out.println(result);
// assertEquals("Result did not match expected result", expResult, result);
// }
}

@ -64,7 +64,6 @@ import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.ObjectUtils.notEqual;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
@ -658,7 +657,7 @@ public class GroupManager {

updateCurrentPathGroup(pathGroupKey);
} catch (TskCoreException | TskDataException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error getting drawabledb for fileId " + fileId, ex);
}

// Update all the groups that this file belongs to
@ -986,7 +985,7 @@ public class GroupManager {
.findAny().ifPresent(obj_id -> types.add(mimeType));
}
} catch (SQLException | TskCoreException ex) {
Exceptions.printStackTrace(ex);
logger.log(Level.WARNING, "Error getting group by MIME type", ex);
}
results.putAll(null, types);

@ -174,7 +174,6 @@ public class SortChooser<X, Y extends Comparator<X>> extends HBox {
Image icon = (Image) item.getClass().getMethod("getIcon").invoke(item);
setGraphic(new ImageView(icon));
} catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
// Exceptions.printStackTrace(ex);
setText(item.toString());
setGraphic(null);
}

@ -561,10 +561,13 @@
leading wildcard queries. -->
<!--<field name="text_rev" type="text_general_rev" indexed="true" stored="false" multiValued="true"/>-->

<!-- field with white-space tokenized words for TermsComponent regex search (useful for fast search of IP addresses, URLs, certain phone numbers)
also be useful for Lucene based queries containing special characters-->
<!-- populated via copyField -->
<!-- field with white-space tokenized words for TermsComponent regex search.
No longer being populated by Autopsy, but remains for backward compatability.
content_str is used instead to better deal with regex that have white space. -->
<field name="content_ws" type="text_ws" indexed="true" stored="false" multiValued="true" />

<!-- field that contains the raw string form of the chunk. Used for regular expression
matching. Populated by Autopsy using lower-case text and a copyField for file name. -->
<field name="content_str" type="string" indexed="true" stored="true" multiValued="true" />

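(Editor's note, for orientation only: the content_str field above is what keyword search now targets for regular expressions. As an illustrative sketch, not part of this change set, a TermsComponent lookup against it could be issued as below; the host, port, and core name are placeholders, and Autopsy's actual query path may differ:

    http://localhost:8983/solr/<case_core>/terms?terms.fl=content_str&terms.regex=\d{3}[-.]\d{3}[-.]\d{4}

terms.fl and terms.regex are standard Solr TermsComponent parameters; because content_str is populated with lower-case text, regexes should be written in lower case.)
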
<!-- Uncommenting the following will create a "timestamp" field using

@ -101,25 +101,29 @@ public class Server {
return "image_id"; //NON-NLS
}
},
// This is not stored or index . it is copied to Text and Content_Ws
// This is not stored or indexed. it is copied to text by the schema
CONTENT {
@Override
public String toString() {
return "content"; //NON-NLS
}
},
// String representation for regular expression searching
CONTENT_STR {
@Override
public String toString() {
return "content_str"; //NON-NLS
}
},
// default search field. Populated by schema
TEXT {
@Override
public String toString() {
return "text"; //NON-NLS
}
},
// no longer populated. Was used for regular expression searching.
// Should not be used.
CONTENT_WS {
@Override
public String toString() {
@ -132,28 +136,28 @@ public class Server {
return "file_name"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
CTIME {
@Override
public String toString() {
return "ctime"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
ATIME {
@Override
public String toString() {
return "atime"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
MTIME {
@Override
public String toString() {
return "mtime"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
CRTIME {
@Override
public String toString() {

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -48,8 +48,6 @@ public class CreditCardValidatorTest {

@Test
public void testLengthMatchesBin() {
System.out.println("lengthMatchesBin");

//amex must be 15
assertEquals(true, CreditCardValidator.isValidCCN("3431 136294 58529"));
assertEquals(false, CreditCardValidator.isValidCCN("3431-136294-5850")); //too short
@ -95,8 +93,6 @@ public class CreditCardValidatorTest {
*/
@Test
public void testIsValidCCN16() {
System.out.println("isValidCCN");

//rules for separators and grouping for 16 digits
assertEquals(true, CreditCardValidator.isValidCCN("1234567890318342"));// dashes
assertEquals(true, CreditCardValidator.isValidCCN("1234-5678-9031-8342"));// dashes
@ -111,8 +107,6 @@ public class CreditCardValidatorTest {

@Test
public void testIsValidCCN15() {
System.out.println("isValidCCN");

//amex are fifteen digits, and grouped 4 6 5
//amex cards that strart with 34
assertEquals(true, CreditCardValidator.isValidCCN("3431 136294 58529"));
@ -143,7 +137,6 @@ public class CreditCardValidatorTest {

@Test
public void testIsValidCCN19() {
System.out.println("isValidCCN");
//nineteen digit (visa) cards 4-4-4-4-3
assertEquals(true, CreditCardValidator.isValidCCN("4539747947839518654"));
assertEquals(true, CreditCardValidator.isValidCCN("4539-7479-4783-9518-654"));
@ -168,8 +161,6 @@ public class CreditCardValidatorTest {

@Test
public void testIsValidCCN18() {
System.out.println("isValidCCN");

assertEquals(true, CreditCardValidator.isValidCCN("123456789031834267"));
assertEquals(true, CreditCardValidator.isValidCCN("1234 5678 9031 8342 67"));
assertEquals(true, CreditCardValidator.isValidCCN("1234-56789031834-267"));
@ -181,8 +172,6 @@ public class CreditCardValidatorTest {

@Test
public void testIsValidCCN17() {
System.out.println("isValidCCN");

assertEquals(true, CreditCardValidator.isValidCCN("12345678903183426"));
assertEquals(true, CreditCardValidator.isValidCCN("1234 5678 9031 8342 6"));
assertEquals(true, CreditCardValidator.isValidCCN("1234-56789031834-26"));
@ -194,8 +183,6 @@ public class CreditCardValidatorTest {

@Test
public void testIsValidCCN14() {
System.out.println("isValidCCN");

assertEquals(true, CreditCardValidator.isValidCCN("12345678903183"));
assertEquals(true, CreditCardValidator.isValidCCN("1234 5678 9031 83"));
assertEquals(true, CreditCardValidator.isValidCCN("1234-5678903183"));
@ -207,8 +194,6 @@ public class CreditCardValidatorTest {

@Test
public void testIsValidCCN13() {
System.out.println("isValidCCN");

assertEquals(true, CreditCardValidator.isValidCCN("1234567890318"));
assertEquals(true, CreditCardValidator.isValidCCN("1234 5678 9031 8"));
assertEquals(true, CreditCardValidator.isValidCCN("1234-567890318"));
@ -220,8 +205,6 @@ public class CreditCardValidatorTest {

@Test
public void testIsValidCCN12() {
System.out.println("isValidCCN");

assertEquals(true, CreditCardValidator.isValidCCN("123456789031"));
assertEquals(true, CreditCardValidator.isValidCCN("1234 5678 9031"));
assertEquals(true, CreditCardValidator.isValidCCN("1234-56789031"));

@ -2,14 +2,9 @@ cannotBuildXmlParser=Unable to build XML parser:
cannotLoadSEUQA=Unable to load Search Engine URL Query Analyzer settings file, SEUQAMappings.xml:
cannotParseXml=Unable to parse XML file:
ChromeCacheExtractor.moduleName=ChromeCacheExtractor
# {0} - module name
# {1} - row number
# {2} - table length
# {3} - cache path
ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3}
DataSourceUsage_AndroidMedia=Android Media Card
DataSourceUsage_FlashDrive=Flash Drive
# {0} - OS name
DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})
DataSourceUsageAnalyzer.parentModuleName=Recent Activity
Extract.indexError.message=Failed to index artifact for keyword search.
@ -64,7 +59,7 @@ ExtractZone_progress_Msg=Extracting :Zone.Identifer files
ExtractZone_Restricted=Restricted Sites Zone
ExtractZone_Trusted=Trusted Sites Zone
OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n\The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy.
OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\nThe module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy.
OpenIDE-Module-Name=RecentActivity
OpenIDE-Module-Short-Description=Recent Activity finder ingest module
Chrome.moduleName=Chrome
@ -187,7 +182,6 @@ RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity
RecentDocumentsByLnk.parentModuleName=Recent Activity
RegRipperFullNotFound=Full version RegRipper executable not found.
RegRipperNotFound=Autopsy RegRipper executable not found.
# {0} - file name
SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}.
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine
SearchEngineURLQueryAnalyzer.engineName.none=NONE

@ -390,12 +390,6 @@ class ExtractRegistry extends Extract {
Element oroot = doc.getDocumentElement();
NodeList children = oroot.getChildNodes();
int len = children.getLength();
// Add all "usb" dataType nodes to collection of BlackboardArtifacts
// that we will submit in a ModuleDataEvent for additional processing.
Collection<BlackboardArtifact> usbBBartifacts = new ArrayList<>();
// Add all "ssid" dataType nodes to collection of BlackboardArtifacts
// that we will submit in a ModuleDataEvent for additional processing.
Collection<BlackboardArtifact> wifiBBartifacts = new ArrayList<>();
for (int i = 0; i < len; i++) {

if (context.dataSourceIngestIsCancelled()) {
@ -652,10 +646,8 @@ class ExtractRegistry extends Extract {

// index the artifact for keyword search
postArtifact(bbart);
// add to collection for ModuleDataEvent
usbBBartifacts.add(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding device attached artifact to blackboard."); //NON-NLS
logger.log(Level.SEVERE, "Error adding device attached artifact to blackboard.", ex); //NON-NLS
}
break;
case "uninstall": //NON-NLS
@ -666,8 +658,8 @@ class ExtractRegistry extends Extract {
itemMtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(mTimeAttr).getTime(); //NON-NLS
itemMtime /= MS_IN_SEC;
}
} catch (ParseException e) {
logger.log(Level.WARNING, "Failed to parse epoch time for installed program artifact."); //NON-NLS
} catch (ParseException ex) {
logger.log(Level.WARNING, "Failed to parse epoch time for installed program artifact.", ex); //NON-NLS
}

try {
@ -679,7 +671,7 @@ class ExtractRegistry extends Extract {
// index the artifact for keyword search
postArtifact(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); //NON-NLS
logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard.", ex); //NON-NLS
}
break;
case "office": //NON-NLS
@ -699,7 +691,7 @@ class ExtractRegistry extends Extract {
// index the artifact for keyword search
postArtifact(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding recent object artifact to blackboard."); //NON-NLS
logger.log(Level.SEVERE, "Error adding recent object artifact to blackboard.", ex); //NON-NLS
}
break;

@ -766,7 +758,7 @@ class ExtractRegistry extends Extract {
// index the artifact for keyword search
postArtifact(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding account artifact to blackboard."); //NON-NLS
logger.log(Level.SEVERE, "Error adding account artifact to blackboard.", ex); //NON-NLS
}
break;

@ -783,7 +775,7 @@ class ExtractRegistry extends Extract {
// index the artifact for keyword search
postArtifact(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding network artifact to blackboard."); //NON-NLS
logger.log(Level.SEVERE, "Error adding network artifact to blackboard.", ex); //NON-NLS
}
break;
case "SSID": // NON-NLS
@ -798,9 +790,8 @@ class ExtractRegistry extends Extract {
bbart.addAttributes(bbattributes);
// index the artifact for keyword search
postArtifact(bbart);
wifiBBartifacts.add(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding SSID artifact to blackboard."); //NON-NLS
logger.log(Level.SEVERE, "Error adding SSID artifact to blackboard.", ex); //NON-NLS
}
break;
case "shellfolders": // NON-NLS
@ -818,9 +809,6 @@ class ExtractRegistry extends Extract {
break;
}
} // for

postArtifacts(usbBBartifacts);
postArtifacts(wifiBBartifacts);
return true;
} catch (FileNotFoundException ex) {
logger.log(Level.SEVERE, "Error finding the registry file.", ex); //NON-NLS

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -79,11 +79,9 @@ public class ScalpelCarver {
success = true;
} catch (UnsatisfiedLinkError ex) {
String msg = NbBundle.getMessage(ScalpelCarver.class, "ScalpelCarver.loadLib.errMsg.cannotLoadLib", id);
System.out.println(msg + ex.toString());
logger.log(Level.SEVERE, msg, ex);
} catch (Exception ex) {
String msg = NbBundle.getMessage(ScalpelCarver.class, "ScalpelCarver.loadLib.errMsg.cannotLoadLib2", id);
System.out.println(msg + ex.toString());
logger.log(Level.SEVERE, msg, ex);
}

build.xml (16 changes)
@ -69,12 +69,16 @@

<target name="clean" depends="suite.clean">
<delete includeEmptyDirs="true" failonerror="false">
<fileset dir="docs\doxygen-user\user-docs" includes="**/*"/>
<fileset dir="docs\doxygen-user\user-docs" includes="**/*"/>
</delete>

<delete includeEmptyDirs="true" failonerror="false">
<fileset dir="docs\doxygen\doxygen_docs\api-docs" includes="**/*"/>
<delete includeEmptyDirs="true" failonerror="false">
<fileset dir="docs\doxygen\doxygen_docs\api-docs" includes="**/*"/>
</delete>

<delete includeemptydirs="true" failonerror="false">
<fileset dir="${basedir}/docs/doxygen-dev/dev-docs" includes="**/*"/>
</delete>
</target>

<!-- This target will create a custom ZIP file for us. It first uses the general
@ -260,6 +264,9 @@
<delete includeemptydirs="true" failonerror="false">
<fileset dir="${basedir}/docs/doxygen-user/user-docs" includes="**/*"/>
</delete>
<delete includeemptydirs="true" failonerror="false">
<fileset dir="${basedir}/docs/doxygen-dev/dev-docs" includes="**/*"/>
</delete>

<!-- Generate new -->
<exec executable="doxygen" dir="${basedir}/docs/doxygen">
@ -268,6 +275,9 @@
<exec executable="doxygen" dir="${basedir}/docs/doxygen-user">
<arg value="Doxyfile"/>
</exec>
<exec executable="doxygen" dir="${basedir}/docs/doxygen-dev">
<arg value="Doxyfile"/>
</exec>
</target>

<target name="check-release">
docs/doxygen-dev/Doxyfile (2364 changes, new executable file; diff not shown)

docs/doxygen-dev/css/doxygen_html_style.css (43 changes, new executable file)
@ -0,0 +1,43 @@
div.image img[src="regression_example_workspace-env.PNG"]{
width:750px;
}

div.image img[src="regression_example_rebuild-command.PNG"]{
width:750px;
}

div.image img[src="regression_example_case-output-folder.PNG"]{
width:750px;
}

div.image img[src="regression_example_diff-failure.PNG"]{
width:750px;
}

div.image img[src="regression_example_diff.PNG"]{
width:750px;
}

div.image img[src="regression_example_gold-dir.PNG"]{
width:750px;
}

div.image img[src="regression_example_output-folder.PNG"]{
width:750px;
}

div.image img[src="regression_example_run-command.PNG"]{
width:750px;
}

div.image img[src="unit_functional_testing_file.png"]{
height:500px;
}

div.image img[src="unit_functional_testing_package.png"]{
height:500px;
}

div.image img[src="unit_functional_testing_nbm.png"]{
height:500px;
}
docs/doxygen-dev/footer.html (7 changes, new executable file)
@ -0,0 +1,7 @@
<hr/>
<p><i>Copyright © 2012-2019 Basis Technology. Generated on $date<br/>
This work is licensed under a
<a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/us/">Creative Commons Attribution-Share Alike 3.0 United States License</a>.
</i></p>
</body>
</html>

docs/doxygen-dev/images/regression_example_case-output-folder.PNG (new binary file, 64 KiB)
docs/doxygen-dev/images/regression_example_diff-failure.PNG (new binary file, 17 KiB)
docs/doxygen-dev/images/regression_example_diff.PNG (new binary file, 31 KiB)
docs/doxygen-dev/images/regression_example_gold-dir.PNG (new binary file, 44 KiB)
docs/doxygen-dev/images/regression_example_output-folder.PNG (new binary file, 49 KiB)
docs/doxygen-dev/images/regression_example_rebuild-command.PNG (new binary file, 12 KiB)
docs/doxygen-dev/images/regression_example_run-command.PNG (new binary file, 10 KiB)
docs/doxygen-dev/images/regression_example_workspace-env.PNG (new binary file, 47 KiB)
docs/doxygen-dev/images/unit_functional_testing_file.png (new binary file, 716 KiB)
docs/doxygen-dev/images/unit_functional_testing_nbm.png (new binary file, 379 KiB)
docs/doxygen-dev/images/unit_functional_testing_package.png (new binary file, 478 KiB)
docs/doxygen-dev/main.dox (23 changes, new executable file)
@ -0,0 +1,23 @@
/*! \mainpage Autopsy Developer's Guide


Overview
-----

This is the Developer's Guide for the <a href="http://www.sleuthkit.org/autopsy/">open source Autopsy platform</a>. Autopsy allows you to examine a hard drive or mobile device and recover evidence from it. This guide should help you with developing and testing Autopsy itself. There is a separate <a href="http://www.sleuthkit.org/autopsy/docs/api-docs/">developer's guide</a> for writing Autopsy modules.

Topics
-------
The following topics are available here:

- Testing
  - \subpage regression_testing
  - \subpage unit_functional_testing


If the topic you need is not listed, then you can:
- Refer to the <a href="http://wiki.sleuthkit.org/index.php?title=Autopsy_User%27s_Guide">Autopsy Wiki</a>
- Ask a question on the <a href="http://forum.sleuthkit.org">Forum</a>
- Ask a question on the <a href="https://lists.sourceforge.net/lists/listinfo/sleuthkit-users">Email List</a>.

*/
docs/doxygen-dev/regression_testing.dox (215 changes, new executable file)
@ -0,0 +1,215 @@
/*! \page regression_testing Regression Testing in Autopsy

\section intro_regression Introduction
The autopsy/test folder contains the scripts that are necessary to run the regression test. Developers that are interested in running regression tests should run autopsy/test/script/regression.py. This guide will familiarize you with regression.py and walk you through a simple example at the end.

From a bird's eye view, regression.py will:
- Run ingest on an image by starting up Autopsy and controlling the UI
- Generate an HTML report once ingest is complete
- \ref dump "Dump" the case database and perform \ref diff "diffs" on the \ref gold_standard "gold standards". HTML reports are also \ref diff "diffed".

What's important is that regression.py will enable you to isolate changes that you may have caused while developing and adding features.

Regression.py is also responsible for producing and saving gold standards for you. This allows your gold standards to evolve as your source code changes. In the following sections, we will learn how to accomplish this and what regression.py will need to know in order to run smoothly.

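As a minimal illustrative sketch of what such a run looks like (the paths here are hypothetical; the flags are described under \ref param_desc), a regression test driven by a config file, with all diffs collected into one folder, could be invoked as:

    python regression.py -l C:\path\to\regression_config.xml -o C:\path\to\diff_output

and a rebuild of the gold standards for the same config would add the -r flag:

    python regression.py -r -l C:\path\to\regression_config.xml
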
\section general General
|
||||
Regression.py will use the most recent Autopsy and Sleuthkit builds as a launching point. To ensure that your latest changes are included, you will need to make sure you build.
|
||||
\section terminology Terminology
|
||||
Before we jump in, let's start off by defining some common terminology.
|
- \anchor dump Dump
    - A dump refers to writing all case database INSERT statements to a file. The case database is what Autopsy uses to persist its analysis. Items that are time or execution-order dependent are obfuscated so that only meaningful changes are captured.
- \anchor gold_standard Gold standard
    - A gold standard is simply a dump that we designate as the base expectation for a given ingest run. Some changes may add to this base (potentially positive) and some may modify or remove from it (potentially negative). HTML reports are also saved as gold standards.
- \anchor diff Diff
    - A diff is the result of running the <a href="http://man7.org/linux/man-pages/man1/diff.1.html">diff</a> command against a new dump and its corresponding gold standard. If there are any regressions, they will hopefully be captured in these files.
\section param_desc Parameter Descriptions
Now that we have defined some common terminology, we can move on to the command line arguments regression.py recognizes.
A regression test (one that may produce diffs) is the default unless otherwise specified.
One and only one of the bolded arguments is required.
- <b>-f PATH</b>
    - Run a regression test on the single image at the given PATH. However, it is strongly recommended that you specify an image (even if just one) in a config file instead (see -l below). There are no guarantees that this option will be maintained, as we are not currently using it internally.
- -r (rebuild)
    - Rebuild (overwrite) the gold standards rather than diff against them. Rebuilds are necessary in order to update the gold standards for new (approved) diffs. This parameter needs no additional arguments.
- -b (both compare and rebuild)
    - Produce diffs against the current gold standards and then overwrite them with new ones. It is recommended to separate these steps if you are not using version control on your gold standards. This is logically equivalent to running a regression test and then running with -r. This parameter needs no additional arguments.
- <b>-l PATH</b>
    - The PATH to the config file. The config file is very important and has its own \ref creating_config_file section.
- -u (ignore unallocated space)
    - Unallocated space can take a long time to ingest. Depending on your use case, you may omit processing unallocated space on all images. This parameter needs no additional arguments.
- -k (do not delete the Solr index)
    - Keep (rather than delete) the Solr index for each case. Some keyword hits will be written to the case database; however, it may be useful to keep the index around if you'd like to debug or perform additional analysis. This parameter needs no additional arguments.
- -o PATH (path to output folder for diff files)
    - Aggregate diff files from multiple images into one folder at the specified PATH. If this argument is omitted, the diffs for an image can be found in its respective case output directory.
- -v (verbose mode)
    - Automatically print all ingest logs to the console. This parameter needs no additional arguments.
- -e ARG (enable exception mode with the given string)
    - Print to the console all lines from the ingest logs that contain the given ARG string, e.g. '-e IOException'.
- -h (help)
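As an illustration, a hypothetical invocation combining several of these options might look like the following (a sketch only: it assumes regression.py is run with Python from the autopsy/test/script directory, and the paths are illustrative; see \ref creating_config_file for the config file's contents):

@code
python regression.py -l C:\path\to\regression_config.xml -u -o C:\path\to\diff_output
@endcode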
@section creating_config_file Creating a Config File
The config file is essential to giving regression.py the details it needs to run. This file is simply an XML file with some expected tag names. We'll dive into the tag names and their descriptions in the \ref tag_def section. But first, let's start by taking a look at a template.

\subsection template Template
Here is a complete config file template:
@code{.xml}
<?xml version="1.0" encoding="ASCII"?>
<!-- Configuration File for Regression Testing -->
<Properties>
    <build value="..\..\Testing"/>

    <!-- List all the images you would like to run through ingest. -->
    <image value="path/to/my/first/test/image.img"/>
    <image value="path/to/my/second/test/image.001"/>
    <!-- Logical File Set -->
    <image value="path/to/my/third/test/documents_folder"/>

    <singleUser_golddir value="path/to/my/gold/dir/single_user"/>
    <singleUser_outdir value="path/to/my/output/dir"/>

    <!-- Consider execution time as a test metric -->
    <timing value="True"/>

    <!-- Multi-user case settings -->
    <multiUser_outdir value="\\YourSharedDriveName\path\to\output\dir"/>
    <multiUser_golddir value="path/to/my/gold/dir/multi_user"/>

    <!-- Please input the PostgreSQL database connection information. -->
    <dbHost value="myDBHostName"/>
    <dbPort value="myDBPortNumber"/>
    <dbUserName value="myDBUsername"/>
    <dbPassword value="myDBPassword"/>

    <!-- Please input the Solr connection information. -->
    <solrHost value="mySolrHostName"/>
    <solrPort value="mySolrHostPortNumber"/>

    <!-- Please input the ActiveMQ connection information. -->
    <messageServiceHost value="myActiveMQHostName"/>
    <messageServicePort value="myActiveMQPortNumber"/>

    <!-- Indicate the type of test to run (single, multi, or both) -->
    <userCaseType value="Both-user"/>
</Properties>
@endcode
\subsection tag_def Tag Definitions
With our template above, let's break down what some of these tags mean:
- \<userCaseType\>
    - Specifies the type of case that should be produced during the regression test. This may be significant since multi-user and single-user cases use different databases. Specify this value as "Single-user" to create only single-user cases, "Multi-user" to create only multi-user cases, or "Both-user" to create both for each \<image\>. "Both-user" does two passes over all \<image\> tags: the first is run as "Single-user" and the second as "Multi-user". Each \<image\> gets its own case.
- \<build\>
    - The path to the Autopsy/Testing folder. This directory has code that will control the Autopsy UI during ingest. You can leave this path relative, since regression.py already lives in the autopsy repository.
- \<image\>
    - The path to the image that will be ingested. A new case will be created for each one. You may have any number of image tags, but there must be at least one.
- \<singleUser_golddir\>
    - The path to the directory that contains single-user (see \<userCaseType\>) gold standards for each \<image\> tag. This is also where new single-user gold standards will be saved when run in rebuild mode (-r).
- \<singleUser_outdir\>
    - The location on disk where single-user case output (see \<userCaseType\>) will be stored. Each image will create a case folder in this directory.
- \<timing\>
    - Indicates that ingest time should be considered a test metric.
- \<multiUser_golddir\>
    - The path to the directory that contains multi-user (see \<userCaseType\>) gold standards for each \<image\> tag. This is also where new multi-user gold standards will be saved when run in rebuild mode (-r).
- \<multiUser_outdir\>
    - The location on disk where multi-user case output (see \<userCaseType\>) will be stored. Each image will create a case folder in this directory. It is highly recommended that this folder be shared and the value be a UNC path.
- \<dbHost\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The host name of the machine running the PostgreSQL database that Autopsy will use during ingest.
- \<dbPort\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The port number that the PostgreSQL database is listening on at \<dbHost\>.
- \<dbUserName\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The database user Autopsy will act as when adding records to the case database on \<dbHost\> and \<dbPort\>.
- \<dbPassword\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The password for the \<dbUserName\> that Autopsy will act as when adding records to the case database on \<dbHost\> and \<dbPort\>.
- \<solrHost\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The host name of the Solr service. The Solr service is needed when running keyword search.
- \<solrPort\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The port number Solr is listening on at \<solrHost\>.
- \<messageServiceHost\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The host name of the ActiveMQ service that Autopsy will use for message brokering between clients.
- \<messageServicePort\>
    - Only relevant in multi-user mode (see \<userCaseType\>). The port number that the ActiveMQ service is listening on at \<messageServiceHost\>.
\section example Simple Example
Now that we are familiar with the parameter types and config file definitions, let's walk through a simple example. In this example, we are going to do a rebuild to establish our gold standards, introduce a regression, and then discover it by running a test and examining the diffs.

\subsection wrkspace Workspace
Here is how our workspace is set up:

\image html regression_example_workspace-env.PNG

- autopsy, sleuthkit
    - These folders are clones of the Autopsy and Sleuthkit GitHub repositories. Sleuthkit was built using sleuthkit/win32/updateAndBuildAll.py and Autopsy was built in Netbeans 8.2.
- autopsy_regression_config
    - This folder contains the regression_config.xml that is covered in the next subsection.
- autopsy_single_gold
    - This folder will contain the gold standards for the \<image\> tags we define in the config file.
- autopsy_single_output
    - This folder will contain the case output for each \<image\> tag ingest.
- small2
    - The image that will be used in our regression testing.
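Pieced together from the folders above and the paths in the config file in the next subsection, the workspace is laid out roughly like this (a sketch; the screenshot is authoritative):

@code
C:\test-env\
    autopsy\
    sleuthkit\
    autopsy_regression_config\regression_config.xml
    autopsy_single_gold\
    autopsy_single_output\
    small2.img
@endcode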
\subsection config_file Config File
Here is the config file we will be using for our regression testing. This file is located in the autopsy_regression_config folder.

@code{.xml}
<?xml version="1.0" encoding="ASCII"?>
<!-- Configuration File for Regression Testing -->
<Properties>
    <image value="C:\test-env\small2.img"/>
    <build value="..\..\Testing"/>

    <singleUser_golddir value="C:\test-env\autopsy_single_gold"/>
    <singleUser_outdir value="C:\test-env\autopsy_single_output"/>

    <userCaseType value="Single-user"/>
</Properties>
@endcode
\subsection rebuild Rebuilding Gold Standards
In order to produce diffs, we first need to establish gold standards. We can do this by performing a rebuild (see -r in \ref param_desc).

Let's run the following command from within the autopsy/test/script directory.

\image html regression_example_rebuild-command.PNG
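The exact command is shown in the screenshot above. As a sketch (assuming the same Python invocation style as in \ref param_desc, with the config path from our example workspace), it has the shape:

@code
python regression.py -r -l C:\test-env\autopsy_regression_config\regression_config.xml
@endcode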
This will instruct regression.py to make gold standards using our config file.

Regression.py will start ingesting the images found in the config file one by one, from top to bottom. An ant command will be printed to the console so we can see exactly what is being run. The Autopsy UI should become visible shortly after. Refrain from clicking on anything, as this may interrupt the regression process.

When this process is complete, the directory specified in \<singleUser_golddir\> should be populated with our gold standards. Here is what our folder looks like after the rebuild:

\image html regression_example_gold-dir.PNG
\subsection perform_reg_test Performing a Regression Test
Now that we have our gold standards, it's time to start our development cycle. Let's pretend we accidentally introduced a MIME type regression: all MIME types that were once text/plain are now mistakenly labeled as text/error-plain. Using the diffs that regression.py produces, we can catch this before it becomes a problem.

Let's run the following command from within the autopsy/test/script directory.

\image html regression_example_run-command.PNG
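Again as a sketch (same assumptions as the rebuild command above; omitting -r makes this a default regression test):

@code
python regression.py -l C:\test-env\autopsy_regression_config\regression_config.xml
@endcode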
This will instruct regression.py to run a regression test using our config file.

Like the rebuild, an ant command will be printed to the console so we can see exactly what is being run. Autopsy should become visible shortly. Refrain from clicking on anything, as this may interrupt the regression process.

After completing an image, Autopsy will disappear and you will be presented with a brief summary of the results (which may take some time to compute). Diffs that did not pass will be located in the output directory of the image. This process will continue for all images in the config file.

Since we only had one image in our config file and we ran in single-user mode, regression.py is now finished. As expected, our case database diffs did not pass:

\image html regression_example_diff-failure.PNG

We'll learn how to examine our diffs in the next section.
\subsection examine_diffs Examining the Diffs
Let's start by opening up our output folder:

\image html regression_example_output-folder.PNG

Each timestamp corresponds to the start of a regression or rebuild run. The most recent folder contains our latest diffs. A folder named "small2" will be in this directory. Diff files are stored at the top level of the image directory.

As expected, there is a DBDump-Diff file waiting for us.

\image html regression_example_case-output-folder.PNG

We can examine our regressions by opening this file in Notepad++:

\image html regression_example_diff.PNG

Since our dumps are INSERT statements from the database, we can see exactly which files have changed.

A line preceded by a "<" shows the record in the gold standard. A line preceded by a ">" shows the record in the current dump. As you can see on the second line, our MIME type was mistakenly changed. In the event that this change was correct, we can overwrite our current gold standards by running another rebuild (see -r in \ref param_desc).
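For illustration, a DBDump-Diff entry for this hypothetical MIME type regression would look something like the following (schematic; real tsk_files rows carry many more columns):

@code
< INSERT INTO "tsk_files" VALUES(..., 'readme.txt', ..., 'text/plain', ...);
---
> INSERT INTO "tsk_files" VALUES(..., 'readme.txt', ..., 'text/error-plain', ...);
@endcode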
*/
32
docs/doxygen-dev/unit_functional_testing.dox
Executable file
@ -0,0 +1,32 @@
/*! \page unit_functional_testing Functional/Unit Testing in Autopsy

\section intro_unit Introduction
Autopsy has both functional and unit tests for the Core and KeywordSearch NBMs. These tests can be run at the NBM, package, or file level via Netbeans or Apache Ant. The following sections demonstrate how to run tests at each of these granularities with both Netbeans and Apache Ant.

\section nbm Running Tests by NBM
- In Netbeans, right click the Autopsy-Core or Keyword Search NBM and click Test. This will run both the unit and functional tests.

\image html unit_functional_testing_nbm.png

- Run this ant command from within your autopsy directory. Replace {$NBM} with either KeywordSearch or Core.
    - $ ant -f {$NBM} test
    <br> e.g. ant -f Core test

An internet connection may be required the first time the Autopsy Core tests are run. The ant target will attempt to reach out to Google Drive to download the required test data. This data is stored in autopsy\\Core\\test\\qa-functional\\data.
In the event that tests are failing due to missing test data, manually run the "getTestDataFiles" target from the Core/build.xml script.
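Following the ant -f convention used above, that presumably looks like:
    - $ ant -f Core getTestDataFiles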
\section package Running Tests by Package
- In Netbeans, right click the package and click Test. Unit and functional tests are separated into their own packages.

\image html unit_functional_testing_package.png

- Run this ant command from within the autopsy directory. Replace {$PACKAGE_NAME} with the name of the test package and {$NBM} with either KeywordSearch or Core.
    - $ ant -f {$NBM} -Dtest.includes=org/sleuthkit/autopsy/{$PACKAGE_NAME}/\**\htmlonly\endhtmlonly/\*Test.java test-single
    <br> e.g. ant -f Core -Dtest.includes=org/sleuthkit/autopsy/ingest/\**\htmlonly\endhtmlonly/\*Test.java test-single
\section file Running Tests by File
- In Netbeans, right click the file and click "Test File".

\image html unit_functional_testing_file.png

- Run this ant command from within the autopsy directory. Replace {$TEST_NAME} with the name of the .java file, {$NBM} with either KeywordSearch or Core, and {$PACKAGE_NAME} with the name of the test package.
    - $ ant -f {$NBM} -Dtest.includes=org/sleuthkit/autopsy/{$PACKAGE_NAME}/{$TEST_NAME} test-single
    <br> e.g. ant -f KeywordSearch -Dtest.includes=org/sleuthkit/autopsy/keywordsearch/CreditCardValidatorTest.java test-single

*/
@ -11,6 +11,7 @@ import sys
import psycopg2
import psycopg2.extras
import socket
import csv

class TskDbDiff(object):
    """Compares two TSK/Autopsy SQLite databases.
@ -436,8 +437,27 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
    ig_groups_seen_index = line.find('INSERT INTO "image_gallery_groups_seen"') > -1 or line.find('INSERT INTO image_gallery_groups_seen ') > -1

    parens = line[line.find('(') + 1 : line.rfind(')')]
    fields_list = parens.replace(" ", "").split(',')

    no_space_parens = parens.replace(" ", "")
    fields_list = list(csv.reader([no_space_parens], quotechar="'"))[0]
    # Add back in the quotechar for values that were originally wrapped (csv reader consumes this character)
    fields_list_with_quotes = []
    ptr = 0
    for field in fields_list:
        if(len(field) == 0):
            field = "'" + field + "'"
        else:
            start = no_space_parens.find(field, ptr)
            if((start - 1) >= 0 and no_space_parens[start - 1] == '\''):
                if((start + len(field)) < len(no_space_parens) and no_space_parens[start + len(field)] == '\''):
                    field = "'" + field + "'"
        fields_list_with_quotes.append(field)
        if(ptr > 0):
            # Add one for each comma that is used to separate values in the original string
            ptr += 1
        ptr += len(field)

    fields_list = fields_list_with_quotes

    # remove object ID
    if files_index:
        newLine = ('INSERT INTO "tsk_files" VALUES(' + ', '.join(fields_list[1:]) + ');')
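For context on the change above: the old comma split breaks any quoted value that itself contains a comma, which is why the patch switches to csv.reader and then restores the consumed quote characters. A small standalone sketch (hypothetical values, not part of the patch):

import csv

# Hypothetical VALUES(...) payload: the second field contains a comma.
no_space_parens = "1,'Hello,world',text/plain"

# Naive split breaks the quoted value into two fields.
print(no_space_parens.split(','))  # ['1', "'Hello", "world'", 'text/plain']

# csv.reader honors the single-quote quotechar and keeps the value whole,
# but consumes the quote characters, hence the re-quoting loop in the patch.
print(list(csv.reader([no_space_parens], quotechar="'"))[0])  # ['1', 'Hello,world', 'text/plain']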