diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/DefaultIngestStream.java b/Core/src/org/sleuthkit/autopsy/casemodule/DefaultIngestStream.java index 08bc0fa427..98f2855fcb 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/DefaultIngestStream.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/DefaultIngestStream.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.casemodule; import java.util.List; +import org.sleuthkit.autopsy.ingest.IngestJob; import org.sleuthkit.autopsy.ingest.IngestStream; import org.sleuthkit.autopsy.ingest.IngestStreamClosedException; @@ -35,6 +36,11 @@ class DefaultIngestStream implements IngestStream { public void addFiles(List fileObjectIds) throws IngestStreamClosedException { // Do nothing } + + @Override + public IngestJob getIngestJob() { + throw new UnsupportedOperationException("DefaultIngestStream has no associated IngestJob"); + } @Override public synchronized boolean isClosed() { diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java b/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java index 9dd9a39fd4..207b83ed8a 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java @@ -464,6 +464,42 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour doAddImageProcess(deviceId, dataSourcePath.toString(), sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callBack); } + + @Override + public IngestStream processWithIngestStream(String deviceId, Path dataSourcePath, IngestJobSettings settings, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) { + this.deviceId = deviceId; + this.imagePath = dataSourcePath.toString(); + this.sectorSize = 0; + this.timeZone = Calendar.getInstance().getTimeZone().getID(); + this.ignoreFatOrphanFiles = false; + setDataSourceOptionsCalled = true; + + // Set up the data source before 
creating the ingest stream + try { + image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(), + new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex); + final List errors = new ArrayList<>(); + errors.add(ex.getMessage()); + callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>()); + return null; + } + + // Now initialize the ingest stream + try { + ingestStream = IngestManager.getInstance().openIngestStream(image, settings); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error starting ingest modules", ex); + final List errors = new ArrayList<>(); + errors.add(ex.getMessage()); + callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>()); + return null; + } + + doAddImageProcess(deviceId, dataSourcePath.toString(), sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callBack); + return ingestStream; + } /** * Sets the configuration of the data source processor without using the diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java index 957b887c75..55163535cb 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2019 Basis Technology Corp. + * Copyright 2018-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -97,68 +97,64 @@ import org.sleuthkit.datamodel.TskCoreException; * Image viewer part of the Media View layered pane. Uses JavaFX to display the * image. 
*/ -@NbBundle.Messages({"MediaViewImagePanel.externalViewerButton.text=Open in External Viewer Ctrl+E", +@NbBundle.Messages({ + "MediaViewImagePanel.externalViewerButton.text=Open in External Viewer Ctrl+E", "MediaViewImagePanel.errorLabel.text=Could not load file into Media View.", - "MediaViewImagePanel.errorLabel.OOMText=Could not load file into Media View: insufficent memory."}) + "MediaViewImagePanel.errorLabel.OOMText=Could not load file into Media View: insufficent memory." +}) @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPanel { - private static final Image EXTERNAL = new Image(MediaViewImagePanel.class.getResource("/org/sleuthkit/autopsy/images/external.png").toExternalForm()); - private final static Logger LOGGER = Logger.getLogger(MediaViewImagePanel.class.getName()); - - private final boolean fxInited; - - private JFXPanel fxPanel; - private AbstractFile file; + private static final long serialVersionUID = 1L; + private static final Logger logger = Logger.getLogger(MediaViewImagePanel.class.getName()); + private static final double[] ZOOM_STEPS = { + 0.0625, 0.125, 0.25, 0.375, 0.5, 0.75, + 1, 1.5, 2, 2.5, 3, 4, 5, 6, 8, 10}; + private static final double MIN_ZOOM_RATIO = 0.0625; // 6.25% + private static final double MAX_ZOOM_RATIO = 10.0; // 1000% + private static final Image externalImage = new Image(MediaViewImagePanel.class.getResource("/org/sleuthkit/autopsy/images/external.png").toExternalForm()); + private static final SortedSet supportedMimes = ImageUtils.getSupportedImageMimeTypes(); + private static final List supportedExtensions = ImageUtils.getSupportedImageExtensions().stream() + .map("."::concat) //NOI18N + .collect(Collectors.toList()); + + /* + * JFX components + */ + private final ProgressBar progressBar = new ProgressBar(); + private final MaskerPane maskerPane = new MaskerPane(); private Group masterGroup; private 
ImageTagsGroup tagsGroup; private ImageTagCreator imageTagCreator; private ImageView fxImageView; private ScrollPane scrollPane; - private final ProgressBar progressBar = new ProgressBar(); - private final MaskerPane maskerPane = new MaskerPane(); - + private Task readImageTask; + + /* + * Swing components + */ private final JPopupMenu imageTaggingOptions = new JPopupMenu(); private final JMenuItem createTagMenuItem; private final JMenuItem deleteTagMenuItem; private final JMenuItem hideTagsMenuItem; private final JMenuItem exportTagsMenuItem; - private final JFileChooser exportChooser; - private final PropertyChangeSupport pcs = new PropertyChangeSupport(this); - + private JFXPanel fxPanel; + + /* + * State + */ + private final boolean fxInited; private double zoomRatio; private double rotation; // Can be 0, 90, 180, and 270. - - private boolean autoResize = true; // Auto resize when the user changes the size - // of the content viewer unless the user has used the zoom buttons. - private static final double[] ZOOM_STEPS = { - 0.0625, 0.125, 0.25, 0.375, 0.5, 0.75, - 1, 1.5, 2, 2.5, 3, 4, 5, 6, 8, 10}; - - private static final double MIN_ZOOM_RATIO = 0.0625; // 6.25% - private static final double MAX_ZOOM_RATIO = 10.0; // 1000% + private boolean autoResize = true; // Auto resize when the user changes the size of the content viewer unless the user has used the zoom buttons. + private AbstractFile file; static { ImageIO.scanForPlugins(); } - /** - * mime types we should be able to display. 
if the mimetype is unknown we - * will fall back on extension and jpg/png header - */ - static private final SortedSet supportedMimes = ImageUtils.getSupportedImageMimeTypes(); - - /** - * extensions we should be able to display - */ - static private final List supportedExtensions = ImageUtils.getSupportedImageExtensions().stream() - .map("."::concat) //NOI18N - .collect(Collectors.toList()); - - private Task readImageTask; - /** * Creates new form MediaViewImagePanel */ @@ -168,7 +164,7 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan "MediaViewImagePanel.hideTagOption=Hide", "MediaViewImagePanel.exportTagOption=Export" }) - public MediaViewImagePanel() { + MediaViewImagePanel() { initComponents(); fxInited = org.sleuthkit.autopsy.core.Installer.isJavaFxInited(); @@ -354,14 +350,13 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan } private void showErrorNode(String errorMessage, AbstractFile file) { - final Button externalViewerButton = new Button(Bundle.MediaViewImagePanel_externalViewerButton_text(), new ImageView(EXTERNAL)); - externalViewerButton.setOnAction(actionEvent - -> //fx ActionEvent - /* - * TODO: why is the name passed into the action constructor? it - * means we duplicate this string all over the place -jm - */ new ExternalViewerAction(Bundle.MediaViewImagePanel_externalViewerButton_text(), new FileNode(file)) - .actionPerformed(new ActionEvent(this, ActionEvent.ACTION_PERFORMED, "")) //Swing ActionEvent + final Button externalViewerButton = new Button(Bundle.MediaViewImagePanel_externalViewerButton_text(), new ImageView(externalImage)); + /* + * Tie a Swing action (ExternalViewerAction) to a JFX button action. 
+ */ + externalViewerButton.setOnAction(actionEvent -> + new ExternalViewerAction(Bundle.MediaViewImagePanel_externalViewerButton_text(), new FileNode(file)) + .actionPerformed(new ActionEvent(this, ActionEvent.ACTION_PERFORMED, "")) ); final VBox errorNode = new VBox(10, new Label(errorMessage), externalViewerButton); @@ -420,7 +415,7 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan "state", null, State.NONEMPTY)); } } catch (TskCoreException | NoCurrentCaseException ex) { - LOGGER.log(Level.WARNING, "Could not retrieve image tags for file in case db", ex); //NON-NLS + logger.log(Level.WARNING, "Could not retrieve image tags for file in case db", ex); //NON-NLS } scrollPane.setContent(masterGroup); } else { @@ -693,14 +688,14 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan private void rotateLeftButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_rotateLeftButtonActionPerformed autoResize = false; - + rotation = (rotation + 270) % 360; updateView(); }//GEN-LAST:event_rotateLeftButtonActionPerformed private void rotateRightButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_rotateRightButtonActionPerformed autoResize = false; - + rotation = (rotation + 90) % 360; updateView(); }//GEN-LAST:event_rotateRightButtonActionPerformed @@ -760,7 +755,7 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan Case.getCurrentCase().getServices().getTagsManager().deleteContentTag(contentViewerTag.getContentTag()); tagsGroup.getChildren().remove(tagInFocus); } catch (TskCoreException | NoCurrentCaseException ex) { - LOGGER.log(Level.WARNING, "Could not delete image tag in case db", ex); //NON-NLS + logger.log(Level.WARNING, "Could not delete image tag in case db", ex); //NON-NLS } scrollPane.setCursor(Cursor.DEFAULT); @@ -793,7 +788,7 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan ImageTag imageTag = 
buildImageTag(contentViewerTag); tagsGroup.getChildren().add(imageTag); } catch (TskCoreException | SerializationException | NoCurrentCaseException ex) { - LOGGER.log(Level.WARNING, "Could not save new image tag in case db", ex); //NON-NLS + logger.log(Level.WARNING, "Could not save new image tag in case db", ex); //NON-NLS } scrollPane.setCursor(Cursor.DEFAULT); @@ -832,7 +827,7 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan ImageTagRegion newRegion = (ImageTagRegion) edit.getNewValue(); ContentViewerTagManager.updateTag(contentViewerTag, newRegion); } catch (SerializationException | TskCoreException | NoCurrentCaseException ex) { - LOGGER.log(Level.WARNING, "Could not save edit for image tag in case db", ex); //NON-NLS + logger.log(Level.WARNING, "Could not save edit for image tag in case db", ex); //NON-NLS } scrollPane.setCursor(Cursor.DEFAULT); }); @@ -916,7 +911,7 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan JOptionPane.showMessageDialog(null, Bundle.MediaViewImagePanel_successfulExport()); } catch (Exception ex) { //Runtime exceptions may spill out of ImageTagsUtil from JavaFX. //This ensures we (devs and users) have something when it doesn't work. 
- LOGGER.log(Level.WARNING, "Unable to export tagged image to disk", ex); //NON-NLS + logger.log(Level.WARNING, "Unable to export tagged image to disk", ex); //NON-NLS JOptionPane.showMessageDialog(null, Bundle.MediaViewImagePanel_unsuccessfulExport()); } return null; diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED index 24083965fb..d5c75e81a3 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED @@ -26,7 +26,7 @@ CallLogArtifactViewer_label_from=From CallLogArtifactViewer_label_to=To CallLogArtifactViewer_suffix_local=(Local) CallLogArtifactViewer_value_unknown=Unknown -#{0} - contact name +# {0} - contact name CommunicationArtifactViewerHelper_contact_label=Contact: {0} CommunicationArtifactViewerHelper_contact_label_unknown=Unknown CommunicationArtifactViewerHelper_menuitem_copy=Copy diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/CommunicationArtifactViewerHelper.java b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/CommunicationArtifactViewerHelper.java index 071fc7011b..e65550f88c 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/CommunicationArtifactViewerHelper.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/CommunicationArtifactViewerHelper.java @@ -440,7 +440,7 @@ final class CommunicationArtifactViewerHelper { * @return A JLabel with the contact information. 
*/ @NbBundle.Messages({ - "#{0} - contact name", + "# {0} - contact name", "CommunicationArtifactViewerHelper_contact_label=Contact: {0}", "CommunicationArtifactViewerHelper_contact_label_unknown=Unknown" }) diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java index 469cb24d04..89b8c470c7 100644 --- a/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java +++ b/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2013-2019 Basis Technology Corp. + * Copyright 2013-2020 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,28 +31,31 @@ import org.apache.commons.lang3.SystemUtils; import org.sleuthkit.autopsy.core.UserPreferences; /** - * Executes a command line using an operating system process with a configurable - * timeout and pluggable logic to kill or continue the process on timeout. + * Executes a command line using an operating system process with pluggable + * logic to terminate the process under certain conditions. */ public final class ExecUtil { private static final Logger logger = Logger.getLogger(ExecUtil.class.getName()); - private static final long DEFAULT_CHECK_INTERVAL = 5; - private static final TimeUnit DEFAULT_CHECK_INTERVAL_UNITS = TimeUnit.SECONDS; + private static final long DEFAULT_TERMINATION_CHECK_INTERVAL = 5; + private static final TimeUnit DEFAULT_TERMINATION_CHECK_INTERVAL_UNITS = TimeUnit.SECONDS; + private static final long MAX_WAIT_FOR_TERMINATION = 1; + private static final TimeUnit MAX_WAIT_FOR_TERMINATION_UNITS = TimeUnit.MINUTES; /** - * The execute() methods do a wait() with a timeout on the executing process - * and query a process terminator each time the timeout expires to determine - * whether or not to kill the process. 
See + * An interface for defining the conditions under which an operating system + * process spawned by an ExecUtil method should be terminated. + * + * Some existing implementations: TimedProcessTerminator, + * InterruptedThreadProcessTerminator, * DataSourceIngestModuleProcessTerminator and - * FileIngestModuleProcessTerminator as examples of ProcessTerminator - * implementations. + * FileIngestModuleProcessTerminator. */ public interface ProcessTerminator { /** - * Decides whether or not to terminate a process being run by a - * ExcUtil.execute() methods. + * Decides whether or not to terminate a process being run by an + * ExecUtil method. * * @return True or false. */ @@ -78,11 +81,11 @@ public final class ExecUtil { public static class TimedProcessTerminator implements ProcessTerminator { private final long startTimeInSeconds; - private final long maxRunTimeInSeconds; + private final Long maxRunTimeInSeconds; /** * Creates a process terminator that can be used to kill a process after - * it has run for a given period of time. + * it exceeds a maximum allowable run time. * * @param maxRunTimeInSeconds The maximum allowable run time in seconds. */ @@ -93,32 +96,41 @@ public final class ExecUtil { /** * Creates a process terminator that can be used to kill a process after - * it has run for a given period of time. Maximum allowable run time is - * set via Autopsy Options panel. If the process termination - * functionality is disabled then the maximum allowable time is set to - * MAX_INT seconds. + * it exceeds a global maximum allowable run time specified as a user + * preference. If the user preference is not set, this terminator has no + * effect. 
*/ public TimedProcessTerminator() { if (UserPreferences.getIsTimeOutEnabled() && UserPreferences.getProcessTimeOutHrs() > 0) { - // user specified time out - this.maxRunTimeInSeconds = UserPreferences.getProcessTimeOutHrs() * 3600; + this.maxRunTimeInSeconds = (long) UserPreferences.getProcessTimeOutHrs() * 3600; } else { - // never time out - this.maxRunTimeInSeconds = Long.MAX_VALUE; + this.maxRunTimeInSeconds = null; } this.startTimeInSeconds = (new Date().getTime()) / 1000; } @Override public boolean shouldTerminateProcess() { - long currentTimeInSeconds = (new Date().getTime()) / 1000; - return (currentTimeInSeconds - this.startTimeInSeconds) > this.maxRunTimeInSeconds; + if (maxRunTimeInSeconds != null) { + long currentTimeInSeconds = (new Date().getTime()) / 1000; + return (currentTimeInSeconds - this.startTimeInSeconds) > this.maxRunTimeInSeconds; + } else { + return false; + } } } /** - * Runs a process without a termination check interval or process - * terminator. + * Runs a process without a process terminator. This method should be used + * with caution because there is nothing to stop the process from running + * forever. + * + * IMPORTANT: This method blocks while the process is running. For legacy + * API reasons, if there is an interrupt the InterruptedException is wrapped + * in an IOException instead of being thrown. Callers that need to know + * about interrupts to detect backgound task cancellation can call + * Thread.isInterrupted() or, if the thread's interrupt flag should be + * cleared, Thread.interrupted(). * * @param processBuilder A process builder used to configure and construct * the process to be run. @@ -127,7 +139,8 @@ public final class ExecUtil { * * @throws SecurityException If a security manager exists and vetoes any * aspect of running the process. - * @throws IOException If an I/O error occurs. + * @throws IOException If an error occurs while executing or + * terminating the process. 
*/ public static int execute(ProcessBuilder processBuilder) throws SecurityException, IOException { return ExecUtil.execute(processBuilder, 30, TimeUnit.DAYS, new ProcessTerminator() { @@ -142,6 +155,13 @@ public final class ExecUtil { * Runs a process using the default termination check interval and a process * terminator. * + * IMPORTANT: This method blocks while the process is running. For legacy + * API reasons, if there is an interrupt the InterruptedException is wrapped + * in an IOException instead of being thrown. Callers that need to know + * about interrupts to detect backgound task cancellation can call + * Thread.isInterrupted() or, if the thread's interrupt flag should be + * cleared, Thread.interrupted(). + * * @param processBuilder A process builder used to configure and construct * the process to be run. * @param terminator The terminator. @@ -150,16 +170,24 @@ public final class ExecUtil { * * @throws SecurityException If a security manager exists and vetoes any * aspect of running the process. - * @throws IOException If an I/O error occurs. + * @throws IOException If an error occurs while executing or + * terminating the process. */ public static int execute(ProcessBuilder processBuilder, ProcessTerminator terminator) throws SecurityException, IOException { - return ExecUtil.execute(processBuilder, ExecUtil.DEFAULT_CHECK_INTERVAL, ExecUtil.DEFAULT_CHECK_INTERVAL_UNITS, terminator); + return ExecUtil.execute(processBuilder, ExecUtil.DEFAULT_TERMINATION_CHECK_INTERVAL, ExecUtil.DEFAULT_TERMINATION_CHECK_INTERVAL_UNITS, terminator); } /** * Runs a process using a custom termination check interval and a process * terminator. * + * IMPORTANT: This method blocks while the process is running. For legacy + * API reasons, if there is an interrupt the InterruptedException is wrapped + * in an IOException instead of being thrown. 
Callers that need to know + * about interrupts to detect backgound task cancellation can call + * Thread.isInterrupted() or, if the thread's interrupt flag should be + * cleared, Thread.interrupted(). + * * @param processBuilder A process builder used to configure and * construct the process to be run. * @param terminationCheckInterval The interval at which to query the @@ -173,12 +201,52 @@ public final class ExecUtil { * * @throws SecurityException If a security manager exists and vetoes any * aspect of running the process. - * @throws IOException If an I/O error occurs. + * @throws IOException If an error occurs while executing or + * terminating the process. */ public static int execute(ProcessBuilder processBuilder, long terminationCheckInterval, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException { return waitForTermination(processBuilder.command().get(0), processBuilder.start(), terminationCheckInterval, units, terminator); } + /** + * Waits for an existing process to finish, using a custom termination check + * interval and a process terminator. + * + * IMPORTANT: This method blocks while the process is running. For legacy + * API reasons, if there is an interrupt the InterruptedException is wrapped + * in an IOException instead of being thrown. Callers that need to know + * about interrupts to detect backgound task cancellation can call + * Thread.isInterrupted() or, if the thread's interrupt flag should be + * cleared, Thread.interrupted(). + * + * @param processName The name of the process, for logging + * purposes. + * @param process The process. + * @param terminationCheckInterval The interval at which to query the + * process terminator to see if the process + * should be killed. + * @param units The units for the termination check + * interval. + * @param terminator The process terminator. + * + * @return The exit value of the process. 
+ * + * @throws IOException If an error occurs while executing or terminating the + * process. + */ + public static int waitForTermination(String processName, Process process, long terminationCheckInterval, TimeUnit units, ProcessTerminator terminator) throws IOException { + try { + return waitForProcess(processName, process, terminationCheckInterval, units, terminator); + } catch (InterruptedException ex) { + /* + * Reset the interrupted flag and wrap the exception in an + * IOException for backwards compatibility. + */ + Thread.currentThread().interrupt(); + throw new IOException(String.format("Interrupted executing %s", processName), ex); //NON-NLS + } + } + /** * Waits for an existing process to finish, using a custom termination check * interval and a process terminator. @@ -195,67 +263,114 @@ public final class ExecUtil { * * @return The exit value of the process. * - * @throws SecurityException If a security manager exists and vetoes any - * aspect of running the process. - * @throws IOException If an I/O error occurs. + * @throws IOException If an error occurs while executing or + * terminating the process. + * @throws InterruptedException If the thread running this code is + * interrupted while the process is running. 
*/ - public static int waitForTermination(String processName, Process process, long terminationCheckInterval, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException { - try { - do { - process.waitFor(terminationCheckInterval, units); - if (process.isAlive() && terminator.shouldTerminateProcess()) { - killProcess(process); - try { - process.waitFor(); - } catch (InterruptedException ex) { - logger.log(Level.WARNING, String.format("Thread running %s was interrupted before the process completed", processName), ex); - } - } - } while (process.isAlive()); - } catch (InterruptedException ex) { - if (process.isAlive()) { - killProcess(process); - } + private static int waitForProcess(String processName, Process process, long terminationCheckInterval, TimeUnit units, ProcessTerminator terminator) throws IOException, InterruptedException { + do { try { - process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning - } catch (InterruptedException exx) { - logger.log(Level.WARNING, String.format("Thread running %s was interrupted before the process completed", processName), exx); + process.waitFor(terminationCheckInterval, units); + } catch (InterruptedException ex) { + logger.log(Level.WARNING, String.format("Interrupted executing %s", processName), ex); //NON-NLS + Thread.currentThread().interrupt(); + terminateProcess(processName, process); + /* + * Note that if the preceding call to terminateProcess() throws + * an IOException, the caller will get that exception instead of + * this InterruptedException, which is arguably preferable. If + * terminateProcess() does not throw an IOException, then its + * call to waitFor() will throw a fresh InterruptedException, + * which is fine. 
+ */ + throw ex; } - logger.log(Level.WARNING, String.format("Thread running %s was interrupted before the process completed", processName), ex); - Thread.currentThread().interrupt(); - } + if (process.isAlive() && terminator.shouldTerminateProcess()) { + terminateProcess(processName, process); + } + } while (process.isAlive()); + + /* + * Careful: Process.exitValue() throws an IllegalStateException if the + * process is still alive when the method is called. This code is set up + * so that the only way Process.exitValue() can be called is when it has + * not been bypassed by an exception and the preceding loop has + * terminated with Process.isAlive == false. + */ return process.exitValue(); } /** - * Kills a process and its children + * Terminates a process and its children, waiting with a time out to try to + * ensure the process is no longer alive before returning. * - * @param process The parent process to kill + * IMPORTANT: This method blocks while the process is running. For legacy + * API reasons, if there is an interrupt (or any other exception) the + * exception is logged instead of being thrown. Callers that need to know + * about interrupts to detect backgound task cancellation can call + * Thread.isInterrupted() or, if the thread's interrupt flag should be + * cleared, Thread.interrupted(). + * + * @param process The process. */ public static void killProcess(Process process) { - if (process == null) { + String processName = process.toString(); + try { + terminateProcess(processName, process); + } catch (IOException ex) { + logger.log(Level.WARNING, String.format("Error occured executing %s", processName), ex); //NON-NLS + } catch (InterruptedException ex) { + logger.log(Level.WARNING, String.format("Interrupted executing %s", processName), ex); //NON-NLS + Thread.currentThread().interrupt(); + } + } + + /** + * Terminates a process and its children, waiting with a time out to try to + * ensure the process is no longer alive before returning. 
+ * + * @param processName The name of the process, for logging purposes. + * @param process The process. + * + * @throws IOException If an error occurs while trying to terminate + * the process. + * @throws InterruptedException If the thread running this code is + * interrupted while waiting for the process to + * terminate. + */ + private static void terminateProcess(String processName, Process process) throws IOException, InterruptedException { + if (process == null || !process.isAlive()) { return; } - try { - if (SystemUtils.IS_OS_WINDOWS) { + if (SystemUtils.IS_OS_WINDOWS) { + try { Win32Process parentProcess = new Win32Process(process); List children = parentProcess.getChildren(); - children.stream().forEach((child) -> { child.terminate(); }); parentProcess.terminate(); - } else { - process.destroyForcibly(); + } catch (Exception ex) { + /* + * Wrap whatever exception was thrown from Windows in an + * exception that is appropriate for this API. + */ + throw new IOException(String.format("Error occured terminating %s", processName), ex); //NON-NLS } - } catch (Exception ex) { - logger.log(Level.WARNING, "Error occurred when attempting to kill process: {0}", ex.getMessage()); // NON-NLS + } else { + process.destroyForcibly(); + } + + if (!process.waitFor(MAX_WAIT_FOR_TERMINATION, MAX_WAIT_FOR_TERMINATION_UNITS)) { + throw new IOException(String.format("Failed to terminate %s after %d %s", processName, MAX_WAIT_FOR_TERMINATION, MAX_WAIT_FOR_TERMINATION_UNITS)); //NON-NLS } } /* - * Used by deprecated methods. + * Fields used by deprecated methods that require instantiation of an + * ExecUtil object. 
*/ private Process proc = null; private ExecUtil.StreamToStringRedirect errorStringRedirect = null; diff --git a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java index 20f3bb59bd..27ffec6d53 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java @@ -22,6 +22,8 @@ import java.nio.file.Path; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; +import org.sleuthkit.autopsy.ingest.IngestJobSettings; +import org.sleuthkit.autopsy.ingest.IngestStream; /** * Interface implemented by DataSourceProcessors in order to be supported by @@ -66,6 +68,31 @@ public interface AutoIngestDataSourceProcessor extends DataSourceProcessor { */ void process(String deviceId, Path dataSourcePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack); + + /** + * Adds a data source to the case database using a background task in a + * separate thread by calling DataSourceProcessor.run() method. Returns as + * soon as the background task is started. The background task uses a + * callback object to signal task completion and return results. Method can + * throw an exception for a system level problem. The exception should not + * be thrown for an issue related to bad input data. + * + * @param deviceId An ASCII-printable identifier for the device + * associated with the data source that is intended + * to be unique across multiple cases (e.g., a UUID). + * @param dataSourcePath Path to the data source. + * @param settings The ingest job settings. 
+ * @param progressMonitor Progress monitor that will be used by the + * background task to report progress. + * @param callBack Callback that will be used by the background task + * to return results. + * + * @return The new ingest stream or null if an error occurred. Errors will be handled by the callback. + */ + default IngestStream processWithIngestStream(String deviceId, Path dataSourcePath, IngestJobSettings settings, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) { + throw new UnsupportedOperationException("Streaming ingest not supported for this data source processor"); + } + /** * A custom exception for the use of AutomatedIngestDataSourceProcessor. */ diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataSourceAnalysisSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataSourceAnalysisSummary.java new file mode 100644 index 0000000000..86aa21c7a1 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataSourceAnalysisSummary.java @@ -0,0 +1,159 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.datasourcesummary.datamodel; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Providing data for the data source analysis tab. + */ +public class DataSourceAnalysisSummary { + + private static final BlackboardAttribute.Type TYPE_SET_NAME = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SET_NAME); + + private static final Set EXCLUDED_KEYWORD_SEARCH_ITEMS = new HashSet<>(Arrays.asList( + "PHONE NUMBERS", + "IP ADDRESSES", + "EMAIL ADDRESSES", + "URLS", + "CREDIT CARD NUMBERS" + )); + + private final SleuthkitCaseProvider provider; + + /** + * Main constructor. + */ + public DataSourceAnalysisSummary() { + this(SleuthkitCaseProvider.DEFAULT); + } + + /** + * Main constructor. + * + * @param provider The means of obtaining a sleuthkit case. + */ + public DataSourceAnalysisSummary(SleuthkitCaseProvider provider) { + this.provider = provider; + } + + /** + * Gets counts for hashset hits. + * + * @param dataSource The datasource for which to identify hashset hits. + * + * @return The hashset set name with the number of hits in descending order. 
+ * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List> getHashsetCounts(DataSource dataSource) throws SleuthkitCaseProviderException, TskCoreException { + return getCountsData(dataSource, TYPE_SET_NAME, ARTIFACT_TYPE.TSK_HASHSET_HIT); + } + + /** + * Gets counts for keyword hits. + * + * @param dataSource The datasource for which to identify keyword hits. + * + * @return The keyword set name with the number of hits in descending order. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List> getKeywordCounts(DataSource dataSource) throws SleuthkitCaseProviderException, TskCoreException { + return getCountsData(dataSource, TYPE_SET_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT).stream() + // make sure we have a valid set and that that set does not belong to the set of excluded items + .filter((pair) -> pair != null && pair.getKey() != null && !EXCLUDED_KEYWORD_SEARCH_ITEMS.contains(pair.getKey().toUpperCase().trim())) + .collect(Collectors.toList()); + } + + /** + * Gets counts for interesting item hits. + * + * @param dataSource The datasource for which to identify interesting item + * hits. + * + * @return The interesting item set name with the number of hits in + * descending order. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List> getInterestingItemCounts(DataSource dataSource) throws SleuthkitCaseProviderException, TskCoreException { + return getCountsData(dataSource, TYPE_SET_NAME, ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT); + } + + /** + * Get counts for the artifact of the specified type. + * + * @param dataSource The datasource. + * @param keyType The attribute to use as the key type. + * @param artifactTypes The types of artifacts for which to query. + * + * @return A list of key value pairs where the key is the attribute type + * value and the value is the count of items found. 
This list is + * sorted by the count descending max to min. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + private List> getCountsData(DataSource dataSource, BlackboardAttribute.Type keyType, ARTIFACT_TYPE... artifactTypes) + throws SleuthkitCaseProviderException, TskCoreException { + List artifacts = new ArrayList<>(); + SleuthkitCase skCase = provider.get(); + + // get all artifacts in one list for each artifact type + for (ARTIFACT_TYPE type : artifactTypes) { + artifacts.addAll(skCase.getBlackboard().getArtifacts(type.getTypeID(), dataSource.getId())); + } + + // group those based on the value of the attribute type that should serve as a key + Map countedKeys = artifacts.stream() + .map((art) -> { + String key = DataSourceInfoUtilities.getStringOrNull(art, keyType); + return (StringUtils.isBlank(key)) ? null : key; + }) + .filter((key) -> key != null) + .collect(Collectors.groupingBy(Function.identity(), Collectors.counting())); + + // sort from max to min counts + return countedKeys.entrySet().stream() + .map((e) -> Pair.of(e.getKey(), e.getValue())) + .sorted((a, b) -> -a.getValue().compareTo(b.getValue())) + .collect(Collectors.toList()); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.form b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.form new file mode 100644 index 0000000000..4016b539a7 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.form @@ -0,0 +1,263 @@ + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.java new file mode 100644 index 0000000000..16de061385 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.java @@ -0,0 +1,209 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.Pair; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataSourceAnalysisSummary; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.CellModelTableCellRenderer.DefaultCellModel; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel.ColumnModel; +import org.sleuthkit.datamodel.DataSource; + +/** + * A tab shown in data source summary displaying hash set hits, keyword hits, + * and interesting item hits within a datasource. 
+ */ +@Messages({ + "AnalysisPanel_keyColumn_title=Name", + "AnalysisPanel_countColumn_title=Count" +}) +public class AnalysisPanel extends BaseDataSourceSummaryPanel { + + private static final long serialVersionUID = 1L; + + /** + * Default Column definitions for each table + */ + private static final List>> DEFAULT_COLUMNS = Arrays.asList( + new ColumnModel<>( + Bundle.AnalysisPanel_keyColumn_title(), + (pair) -> new DefaultCellModel(pair.getKey()), + 300 + ), + new ColumnModel<>( + Bundle.AnalysisPanel_countColumn_title(), + (pair) -> new DefaultCellModel(String.valueOf(pair.getValue())), + 100 + ) + ); + + private final JTablePanel> hashsetHitsTable = JTablePanel.getJTablePanel(DEFAULT_COLUMNS); + + private final JTablePanel> keywordHitsTable = JTablePanel.getJTablePanel(DEFAULT_COLUMNS); + + private final JTablePanel> interestingItemsTable = JTablePanel.getJTablePanel(DEFAULT_COLUMNS); + + private final List> tables = Arrays.asList( + hashsetHitsTable, + keywordHitsTable, + interestingItemsTable + ); + + /** + * All of the components necessary for data fetch swing workers to load data + * for each table. + */ + private final List> dataFetchComponents; + + /** + * Creates a new AnalysisPanel. 
+ */ + public AnalysisPanel() { + this(new DataSourceAnalysisSummary()); + } + + public AnalysisPanel(DataSourceAnalysisSummary analysisData) { + // set up data acquisition methods + dataFetchComponents = Arrays.asList( + // hashset hits loading components + new DataFetchWorker.DataFetchComponents<>( + (dataSource) -> analysisData.getHashsetCounts(dataSource), + (result) -> hashsetHitsTable.showDataFetchResult(result)), + // keyword hits loading components + new DataFetchWorker.DataFetchComponents<>( + (dataSource) -> analysisData.getKeywordCounts(dataSource), + (result) -> keywordHitsTable.showDataFetchResult(result)), + // interesting item hits loading components + new DataFetchWorker.DataFetchComponents<>( + (dataSource) -> analysisData.getInterestingItemCounts(dataSource), + (result) -> interestingItemsTable.showDataFetchResult(result)) + ); + + initComponents(); + } + + @Override + protected void onNewDataSource(DataSource dataSource) { + // if no data source is present or the case is not open, + // set results for tables to null. + if (dataSource == null || !Case.isCaseOpen()) { + this.dataFetchComponents.forEach((item) -> item.getResultHandler() + .accept(DataFetchResult.getSuccessResult(null))); + + } else { + // set tables to display loading screen + this.tables.forEach((table) -> table.showDefaultLoadingMessage()); + + // create swing workers to run for each table + List> workers = dataFetchComponents + .stream() + .map((components) -> new DataFetchWorker<>(components, dataSource)) + .collect(Collectors.toList()); + + // submit swing workers to run + submit(workers); + } + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. 
+ */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + javax.swing.JScrollPane mainScrollPane = new javax.swing.JScrollPane(); + javax.swing.JPanel mainContentPanel = new javax.swing.JPanel(); + javax.swing.JLabel hashsetHitsLabel = new javax.swing.JLabel(); + javax.swing.Box.Filler filler1 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 2), new java.awt.Dimension(0, 2), new java.awt.Dimension(32767, 2)); + javax.swing.JPanel hashSetHitsPanel = hashsetHitsTable; + javax.swing.Box.Filler filler2 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 20), new java.awt.Dimension(0, 20), new java.awt.Dimension(32767, 20)); + javax.swing.JLabel keywordHitsLabel = new javax.swing.JLabel(); + javax.swing.Box.Filler filler4 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 2), new java.awt.Dimension(0, 2), new java.awt.Dimension(32767, 2)); + javax.swing.JPanel keywordHitsPanel = keywordHitsTable; + javax.swing.Box.Filler filler5 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 20), new java.awt.Dimension(0, 20), new java.awt.Dimension(32767, 20)); + javax.swing.JLabel interestingItemLabel = new javax.swing.JLabel(); + javax.swing.Box.Filler filler6 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 2), new java.awt.Dimension(0, 2), new java.awt.Dimension(32767, 2)); + javax.swing.JPanel interestingItemPanel = interestingItemsTable; + javax.swing.Box.Filler filler3 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 0), new java.awt.Dimension(0, 0), new java.awt.Dimension(0, 32767)); + + mainContentPanel.setBorder(javax.swing.BorderFactory.createEmptyBorder(10, 10, 10, 10)); + mainContentPanel.setMaximumSize(new java.awt.Dimension(32767, 452)); + mainContentPanel.setMinimumSize(new java.awt.Dimension(200, 452)); + mainContentPanel.setLayout(new javax.swing.BoxLayout(mainContentPanel, javax.swing.BoxLayout.PAGE_AXIS)); + + org.openide.awt.Mnemonics.setLocalizedText(hashsetHitsLabel, 
org.openide.util.NbBundle.getMessage(AnalysisPanel.class, "AnalysisPanel.hashsetHitsLabel.text")); // NOI18N + mainContentPanel.add(hashsetHitsLabel); + mainContentPanel.add(filler1); + + hashSetHitsPanel.setAlignmentX(0.0F); + hashSetHitsPanel.setMaximumSize(new java.awt.Dimension(32767, 106)); + hashSetHitsPanel.setMinimumSize(new java.awt.Dimension(10, 106)); + hashSetHitsPanel.setPreferredSize(new java.awt.Dimension(10, 106)); + mainContentPanel.add(hashSetHitsPanel); + mainContentPanel.add(filler2); + + org.openide.awt.Mnemonics.setLocalizedText(keywordHitsLabel, org.openide.util.NbBundle.getMessage(AnalysisPanel.class, "AnalysisPanel.keywordHitsLabel.text")); // NOI18N + mainContentPanel.add(keywordHitsLabel); + mainContentPanel.add(filler4); + + keywordHitsPanel.setAlignmentX(0.0F); + keywordHitsPanel.setMaximumSize(new java.awt.Dimension(32767, 106)); + keywordHitsPanel.setMinimumSize(new java.awt.Dimension(10, 106)); + keywordHitsPanel.setPreferredSize(new java.awt.Dimension(10, 106)); + mainContentPanel.add(keywordHitsPanel); + mainContentPanel.add(filler5); + + org.openide.awt.Mnemonics.setLocalizedText(interestingItemLabel, org.openide.util.NbBundle.getMessage(AnalysisPanel.class, "AnalysisPanel.interestingItemLabel.text")); // NOI18N + mainContentPanel.add(interestingItemLabel); + mainContentPanel.add(filler6); + + interestingItemPanel.setAlignmentX(0.0F); + interestingItemPanel.setMaximumSize(new java.awt.Dimension(32767, 106)); + interestingItemPanel.setMinimumSize(new java.awt.Dimension(10, 106)); + interestingItemPanel.setPreferredSize(new java.awt.Dimension(10, 106)); + mainContentPanel.add(interestingItemPanel); + mainContentPanel.add(filler3); + + mainScrollPane.setViewportView(mainContentPanel); + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(mainScrollPane, 
javax.swing.GroupLayout.DEFAULT_SIZE, 756, Short.MAX_VALUE) + ); + layout.setVerticalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(mainScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 300, Short.MAX_VALUE) + ); + }// //GEN-END:initComponents + + // Variables declaration - do not modify//GEN-BEGIN:variables + // End of variables declaration//GEN-END:variables +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties index 3d4d3af78c..b88a008e10 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties @@ -35,6 +35,9 @@ DataSourceSummaryUserActivityPanel.recentAccountsLabel.text=Recent Accounts DataSourceSummaryUserActivityPanel.topWebSearchLabel.text=Recent Web Searches DataSourceSummaryUserActivityPanel.topDevicesAttachedLabel.text=Recent Devices Attached DataSourceSummaryUserActivityPanel.recentDomainsLabel.text=Recent Domains +AnalysisPanel.hashsetHitsLabel.text=Hashset Hits +AnalysisPanel.keywordHitsLabel.text=Keyword Hits +AnalysisPanel.interestingItemLabel.text=Interesting Item Hits RecentFilesPanel.openDocsLabel.text=Recently Opened Documents RecentFilesPanel.downloadLabel.text=Recent Downloads RecentFilesPanel.attachmentLabel.text=Recent Attachements diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED index bd71f58f9c..f3325f2e46 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED @@ -1,3 +1,5 @@ +AnalysisPanel_countColumn_title=Count +AnalysisPanel_keyColumn_title=Name CTL_DataSourceSummaryAction=Data Source Summary 
DataSourceSummaryCountsPanel.ArtifactCountsTableModel.count.header=Count DataSourceSummaryCountsPanel.ArtifactCountsTableModel.type.header=Result Type @@ -64,6 +66,7 @@ DataSourceSummaryNode.column.status.header=Ingest Status DataSourceSummaryNode.column.tags.header=Tags DataSourceSummaryNode.column.type.header=Type DataSourceSummaryNode.viewDataSourceAction.text=Go to Data Source +DataSourceSummaryTabbedPane_analysisTab_title=Analysis DataSourceSummaryTabbedPane_countsTab_title=Counts DataSourceSummaryTabbedPane_detailsTab_title=Container DataSourceSummaryTabbedPane_ingestHistoryTab_title=Ingest History @@ -74,6 +77,9 @@ DataSourceSummaryUserActivityPanel.recentAccountsLabel.text=Recent Accounts DataSourceSummaryUserActivityPanel.topWebSearchLabel.text=Recent Web Searches DataSourceSummaryUserActivityPanel.topDevicesAttachedLabel.text=Recent Devices Attached DataSourceSummaryUserActivityPanel.recentDomainsLabel.text=Recent Domains +AnalysisPanel.hashsetHitsLabel.text=Hashset Hits +AnalysisPanel.keywordHitsLabel.text=Keyword Hits +AnalysisPanel.interestingItemLabel.text=Interesting Item Hits DataSourceSummaryUserActivityPanel_noDataExists=No communication data exists DataSourceSummaryUserActivityPanel_tab_title=User Activity DataSourceSummaryUserActivityPanel_TopAccountTableModel_accountType_header=Account Type diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java index 047af1cdd9..9696f4d4c4 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java @@ -37,7 +37,8 @@ import org.sleuthkit.datamodel.DataSource; "DataSourceSummaryTabbedPane_detailsTab_title=Container", "DataSourceSummaryTabbedPane_userActivityTab_title=User Activity", "DataSourceSummaryTabbedPane_ingestHistoryTab_title=Ingest History", - 
"DataSourceSummaryTabbedPane_recentFileTab_title=Recent Files" + "DataSourceSummaryTabbedPane_recentFileTab_title=Recent Files", + "DataSourceSummaryTabbedPane_analysisTab_title=Analysis" }) public class DataSourceSummaryTabbedPane extends JTabbedPane { @@ -47,7 +48,8 @@ public class DataSourceSummaryTabbedPane extends JTabbedPane { private final List> tabs = new ArrayList<>(Arrays.asList( Pair.of(Bundle.DataSourceSummaryTabbedPane_countsTab_title(), new DataSourceSummaryCountsPanel()), Pair.of(Bundle.DataSourceSummaryTabbedPane_userActivityTab_title(), new DataSourceSummaryUserActivityPanel()), - Pair.of(Bundle.DataSourceSummaryTabbedPane_recentFileTab_title(), new RecentFilesPanel()) + Pair.of(Bundle.DataSourceSummaryTabbedPane_recentFileTab_title(), new RecentFilesPanel()), + Pair.of(Bundle.DataSourceSummaryTabbedPane_analysisTab_title(), new AnalysisPanel()) )); private final IngestJobInfoPanel ingestHistoryPanel = new IngestJobInfoPanel(); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryUserActivityPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryUserActivityPanel.java index 309b70008e..b4011fe4d6 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryUserActivityPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryUserActivityPanel.java @@ -103,7 +103,8 @@ public class DataSourceSummaryUserActivityPanel extends BaseDataSourceSummaryPan return new DefaultCellModel( getShortFolderName( prog.getProgramPath(), - prog.getProgramName())); + prog.getProgramName())) + .setTooltip(prog.getProgramPath()); }, 150), // run count column diff --git a/Core/src/org/sleuthkit/autopsy/discovery/GroupListPanel.java b/Core/src/org/sleuthkit/autopsy/discovery/GroupListPanel.java index a71876e88d..3617ce3670 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/GroupListPanel.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/GroupListPanel.java @@ -20,6 
+20,8 @@ package org.sleuthkit.autopsy.discovery; import com.google.common.eventbus.Subscribe; import java.awt.Cursor; +import java.awt.Graphics2D; +import java.awt.font.FontRenderContext; import java.util.List; import java.util.Map; import javax.swing.DefaultListCellRenderer; @@ -204,11 +206,30 @@ final class GroupListPanel extends javax.swing.JPanel { if (newValue instanceof GroupKey) { String valueString = newValue.toString(); setToolTipText(valueString); - //if paths would be longer than 37 characters shorten them to be 37 characters - if (groupingAttribute instanceof FileSearch.ParentPathAttribute && valueString.length() > 37) { - valueString = valueString.substring(0, 16) + " ... " + valueString.substring(valueString.length() - 16); + + valueString += " (" + groupMap.get(newValue) + ")"; + + if (groupingAttribute instanceof FileSearch.ParentPathAttribute) { + // Using the list FontRenderContext instead of this because + // the label RenderContext was sometimes null, but this should work. + FontRenderContext context = ((Graphics2D) list.getGraphics()).getFontRenderContext(); + + //Determine the width of the string with the given font. + double stringWidth = getFont().getStringBounds(valueString, context).getWidth(); + // subtracting 10 from the width as a little inset. + int listWidth = list.getWidth() - 10; + + if (stringWidth > listWidth) { + double avgCharWidth = Math.floor(stringWidth / valueString.length()); + + // The extra 5 is to account for the " ... " that is being added back. + int charToRemove = (int) Math.ceil((stringWidth - listWidth) / avgCharWidth) + 5; + int charactersToShow = (int) Math.ceil((valueString.length() - charToRemove) / 2); + valueString = valueString.substring(0, charactersToShow) + " ... 
" + valueString.substring(valueString.length() - charactersToShow); + } + } - newValue = valueString + " (" + groupMap.get(newValue) + ")"; + newValue = valueString; } super.getListCellRendererComponent(list, newValue, index, isSelected, cellHasFocus); return this; diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java index 4e8e9c4019..a2687d5c1d 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java @@ -58,6 +58,11 @@ class IngestJobInputStream implements IngestStream { } ingestJob.addStreamingIngestFiles(fileObjectIds); } + + @Override + public IngestJob getIngestJob() { + return ingestJob; + } @Override public synchronized void close() { diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestStream.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestStream.java index 62a42af208..77001531be 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestStream.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestStream.java @@ -34,6 +34,13 @@ public interface IngestStream { * @throws IngestStreamClosedException */ void addFiles(List fileObjectIds) throws IngestStreamClosedException; + + /** + * Get the ingest job associated with this ingest stream. + * + * @return The IngestJob. + */ + IngestJob getIngestJob(); /** * Closes the ingest stream. 
Should be called after all files from data diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java index 708ce7ee14..5bc3f46fac 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java @@ -275,6 +275,21 @@ final class AutoIngestJobLogger { void logIngestJobSettingsErrors() throws AutoIngestJobLoggerException, InterruptedException { log(MessageCategory.ERROR, "Failed to analyze data source due to settings errors"); } + + /** + * Logs failure to analyze a data source, possibly due to ingest job settings errors. + * Used with streaming ingest since incorrect settings are the most likely cause + * of the error. + * + * @throws AutoIngestJobLoggerException if there is an error writing the log + * message. + * @throws InterruptedException if interrupted while blocked waiting + * to acquire an exclusive lock on the + * log file. 
+ */ + void logProbableIngestJobSettingsErrors() throws AutoIngestJobLoggerException, InterruptedException { + log(MessageCategory.ERROR, "Failed to analyze data source, probably due to ingest settings errors"); + } /** * Logs failure to analyze a data source due to ingest module startup diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 48b86025af..7c817e7739 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -101,6 +101,7 @@ import org.sleuthkit.autopsy.ingest.IngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestJobStartResult; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestModuleError; +import org.sleuthkit.autopsy.ingest.IngestStream; import org.sleuthkit.autopsy.keywordsearch.KeywordSearchModuleException; import org.sleuthkit.autopsy.keywordsearch.Server; import org.sleuthkit.datamodel.Content; @@ -165,6 +166,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen private AutoIngestJob currentJob; @GuardedBy("jobsLock") private List completedJobs; + private IngestStream currentIngestStream = null; private CoordinationService coordinationService; private JobProcessingTask jobProcessingTask; private Future jobProcessingTaskFuture; @@ -2443,6 +2445,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen return; } + currentIngestStream = null; runDataSourceProcessor(caseForJob, dataSource); if (dataSource.getContent().isEmpty()) { currentJob.setProcessingStage(AutoIngestJob.Stage.COMPLETED, Date.from(Instant.now())); @@ -2558,7 +2561,29 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen caseForJob.notifyAddingDataSource(taskId); 
jobLogger.logDataSourceProcessorSelected(selectedProcessor.getDataSourceType()); sysLogger.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{manifestPath, selectedProcessor.getDataSourceType()}); - selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack); + if (selectedProcessor.supportsIngestStream()) { + IngestJobSettings ingestJobSettings = new IngestJobSettings(AutoIngestUserPreferences.getAutoModeIngestModuleContextString()); + if (! ingestJobSettings.getWarnings().isEmpty()) { + for (String warning : ingestJobSettings.getWarnings()) { + sysLogger.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); + } + currentJob.setErrorsOccurred(true); + setErrorsOccurredFlagForCase(caseDirectoryPath); + jobLogger.logIngestJobSettingsErrors(); + throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Error(s) in ingest job settings for " + manifestPath); + } + currentIngestStream = selectedProcessor.processWithIngestStream(dataSource.getDeviceId(), dataSource.getPath(), ingestJobSettings, progressMonitor, callBack); + if (currentIngestStream == null) { + // Either there was a failure to add the data source object to the database or the ingest settings were bad. + // An error in the ingest settings is the more likely scenario. + currentJob.setErrorsOccurred(true); + setErrorsOccurredFlagForCase(caseDirectoryPath); + jobLogger.logProbableIngestJobSettingsErrors(); + throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Error initializing processing for " + manifestPath + ", probably due to an ingest settings error"); + } + } else { + selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack); + } ingestLock.wait(); // at this point we got the content object(s) from the current DSP. 
@@ -2568,6 +2593,12 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen // move onto the the next DSP that can process this data source jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); logDataSourceProcessorResult(dataSource); + + // If we had created an ingest stream, close it + if (currentIngestStream != null) { + currentIngestStream.stop(); + currentIngestStream = null; + } continue; } @@ -2674,69 +2705,77 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, ingestJobEventListener); try { synchronized (ingestLock) { - IngestJobSettings ingestJobSettings = new IngestJobSettings(AutoIngestUserPreferences.getAutoModeIngestModuleContextString()); - List settingsWarnings = ingestJobSettings.getWarnings(); - if (settingsWarnings.isEmpty()) { - IngestJobStartResult ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings); - IngestJob ingestJob = ingestJobStartResult.getJob(); - if (null != ingestJob) { - currentJob.setIngestJob(ingestJob); - /* - * Block until notified by the ingest job event - * listener or until interrupted because auto ingest - * is shutting down. 
- */ - ingestLock.wait(); - sysLogger.log(Level.INFO, "Finished ingest modules analysis for {0} ", manifestPath); - IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot(); - for (IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot : jobSnapshot.getDataSourceSnapshots()) { - AutoIngestJobLogger nestedJobLogger = new AutoIngestJobLogger(manifestPath, snapshot.getDataSource(), caseDirectoryPath); - if (!snapshot.isCancelled()) { - List cancelledModules = snapshot.getCancelledDataSourceIngestModules(); - if (!cancelledModules.isEmpty()) { - sysLogger.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", manifestPath)); - currentJob.setErrorsOccurred(true); - setErrorsOccurredFlagForCase(caseDirectoryPath); - for (String module : snapshot.getCancelledDataSourceIngestModules()) { - sysLogger.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, manifestPath)); - nestedJobLogger.logIngestModuleCancelled(module); - } - } - nestedJobLogger.logAnalysisCompleted(); - } else { - currentJob.setProcessingStage(AutoIngestJob.Stage.CANCELLING, Date.from(Instant.now())); + IngestJob ingestJob; + IngestJobStartResult ingestJobStartResult = null; + if (currentIngestStream == null) { + IngestJobSettings ingestJobSettings = new IngestJobSettings(AutoIngestUserPreferences.getAutoModeIngestModuleContextString()); + List settingsWarnings = ingestJobSettings.getWarnings(); + if (! 
settingsWarnings.isEmpty()) { + for (String warning : settingsWarnings) { + sysLogger.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); + } + currentJob.setErrorsOccurred(true); + setErrorsOccurredFlagForCase(caseDirectoryPath); + jobLogger.logIngestJobSettingsErrors(); + throw new AnalysisStartupException("Error(s) in ingest job settings"); + } + + + ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings); + ingestJob = ingestJobStartResult.getJob(); + } else { + ingestJob = currentIngestStream.getIngestJob(); + } + + if (null != ingestJob) { + currentJob.setIngestJob(ingestJob); + /* + * Block until notified by the ingest job event + * listener or until interrupted because auto ingest + * is shutting down. + */ + ingestLock.wait(); + sysLogger.log(Level.INFO, "Finished ingest modules analysis for {0} ", manifestPath); + IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot(); + for (IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot : jobSnapshot.getDataSourceSnapshots()) { + AutoIngestJobLogger nestedJobLogger = new AutoIngestJobLogger(manifestPath, snapshot.getDataSource(), caseDirectoryPath); + if (!snapshot.isCancelled()) { + List cancelledModules = snapshot.getCancelledDataSourceIngestModules(); + if (!cancelledModules.isEmpty()) { + sysLogger.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", manifestPath)); currentJob.setErrorsOccurred(true); setErrorsOccurredFlagForCase(caseDirectoryPath); - nestedJobLogger.logAnalysisCancelled(); - CancellationReason cancellationReason = snapshot.getCancellationReason(); - if (CancellationReason.NOT_CANCELLED != cancellationReason && CancellationReason.USER_CANCELLED != cancellationReason) { - throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), manifestPath)); + for (String module : 
snapshot.getCancelledDataSourceIngestModules()) { + sysLogger.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, manifestPath)); + nestedJobLogger.logIngestModuleCancelled(module); } } + nestedJobLogger.logAnalysisCompleted(); + } else { + currentJob.setProcessingStage(AutoIngestJob.Stage.CANCELLING, Date.from(Instant.now())); + currentJob.setErrorsOccurred(true); + setErrorsOccurredFlagForCase(caseDirectoryPath); + nestedJobLogger.logAnalysisCancelled(); + CancellationReason cancellationReason = snapshot.getCancellationReason(); + if (CancellationReason.NOT_CANCELLED != cancellationReason && CancellationReason.USER_CANCELLED != cancellationReason) { + throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), manifestPath)); + } } - } else if (!ingestJobStartResult.getModuleErrors().isEmpty()) { - for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) { - sysLogger.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), manifestPath), error.getThrowable()); - } - currentJob.setErrorsOccurred(true); - setErrorsOccurredFlagForCase(caseDirectoryPath); - jobLogger.logIngestModuleStartupErrors(); - throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", manifestPath)); - } else { - sysLogger.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", manifestPath), ingestJobStartResult.getStartupException()); - currentJob.setErrorsOccurred(true); - setErrorsOccurredFlagForCase(caseDirectoryPath); - jobLogger.logAnalysisStartupError(); - throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException()); } - } else { - for (String warning : settingsWarnings) { - sysLogger.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); + } else if (ingestJobStartResult != null 
&& !ingestJobStartResult.getModuleErrors().isEmpty()) { + for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) { + sysLogger.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), manifestPath), error.getThrowable()); } currentJob.setErrorsOccurred(true); setErrorsOccurredFlagForCase(caseDirectoryPath); - jobLogger.logIngestJobSettingsErrors(); - throw new AnalysisStartupException("Error(s) in ingest job settings"); + jobLogger.logIngestModuleStartupErrors(); + throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", manifestPath)); + } else if (ingestJobStartResult != null) { + sysLogger.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", manifestPath), ingestJobStartResult.getStartupException()); + currentJob.setErrorsOccurred(true); + setErrorsOccurredFlagForCase(caseDirectoryPath); + jobLogger.logAnalysisStartupError(); + throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException()); } } } finally { diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java index 774f6ea3e5..34cc943e34 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java @@ -365,7 +365,7 @@ public class GroupManager { } else { //group == null // It may be that this was the last unanalyzed file in the group, so test // whether the group is now fully analyzed. - return popuplateIfAnalyzed(groupKey, null); + return populateIfAnalyzed(groupKey, null); } } @@ -574,7 +574,7 @@ public class GroupManager { * 'populateIfAnalyzed' will still not return a group and therefore * this method will never mark the group as unseen. 
*/ - group = popuplateIfAnalyzed(groupKey, null); + group = populateIfAnalyzed(groupKey, null); } else { //if there is aleady a group that was previously deemed fully analyzed, then add this newly analyzed file to it. group.addFile(fileID); @@ -680,7 +680,7 @@ public class GroupManager { } else if (groupKey.getValue().toString().equalsIgnoreCase(this.currentPathGroup.getValue().toString()) == false) { // mark the last path group as analyzed getDrawableDB().markGroupAnalyzed(currentPathGroup); - popuplateIfAnalyzed(currentPathGroup, null); + populateIfAnalyzed(currentPathGroup, null); currentPathGroup = groupKey; } @@ -698,7 +698,7 @@ public class GroupManager { try { if (currentPathGroup != null) { getDrawableDB().markGroupAnalyzed(currentPathGroup); - popuplateIfAnalyzed(currentPathGroup, null); + populateIfAnalyzed(currentPathGroup, null); currentPathGroup = null; } } catch (TskCoreException ex) { @@ -713,7 +713,7 @@ public class GroupManager { * * @returns null if Group is not ready to be viewed */ - synchronized private DrawableGroup popuplateIfAnalyzed(GroupKey groupKey, ReGroupTask task) { + synchronized private DrawableGroup populateIfAnalyzed(GroupKey groupKey, ReGroupTask task) { /* * If this method call is part of a ReGroupTask and that task is * cancelled, no-op. @@ -735,7 +735,7 @@ public class GroupManager { if (groupKey.getAttribute() != DrawableAttribute.PATH || getDrawableDB().isGroupAnalyzed(groupKey)) { Set fileIDs = getFileIDsInGroup(groupKey); - if (Objects.nonNull(fileIDs)) { + if (Objects.nonNull(fileIDs) && ! fileIDs.isEmpty()) { long examinerID = collaborativeModeProp.get() ? 
-1 : controller.getCaseDatabase().getCurrentExaminer().getId(); final boolean groupSeen = getDrawableDB().isGroupSeenByExaminer(groupKey, examinerID); @@ -866,7 +866,7 @@ public class GroupManager { p++; updateMessage(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString()) + valForDataSource.getValue()); updateProgress(p, valsByDataSource.size()); - popuplateIfAnalyzed(new GroupKey<>(groupBy, valForDataSource.getValue(), valForDataSource.getKey()), this); + populateIfAnalyzed(new GroupKey<>(groupBy, valForDataSource.getValue(), valForDataSource.getKey()), this); } Optional viewedGroup diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties index 283bbc6dc5..4a10106355 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties @@ -49,10 +49,7 @@ ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registr ExtractRegistry.parentModuleName.noSpace=RecentActivity ExtractRegistry.programName=RegRipper ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1} -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile2={0}: Failed to analyze registry file -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile3={0}: Failed to analyze registry file -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile4={0}: Failed to analyze registry file +ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1} Firefox.moduleName=FireFox Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox. Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found. 
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index af0dbf306b..6a0dd03eef 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -5,10 +5,15 @@ ChromeCacheExtract_adding_artifacts_msg=Chrome Cache: Adding %d artifacts for an ChromeCacheExtract_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis. ChromeCacheExtract_loading_files_msg=Chrome Cache: Loading files from %s. ChromeCacheExtractor.moduleName=ChromeCacheExtractor +# {0} - module name +# {1} - row number +# {2} - table length +# {3} - cache path ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3} DataSourceUsage_AndroidMedia=Android Media Card DataSourceUsage_DJU_Drone_DAT=DJI Internal SD Card DataSourceUsage_FlashDrive=Flash Drive +# {0} - OS name DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0}) DataSourceUsageAnalyzer.parentModuleName=Recent Activity Extract.indexError.message=Failed to index artifact for keyword search. 
@@ -19,6 +24,8 @@ ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Ed ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file +# {0} - sub module name +ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history ExtractOs.androidOs.label=Android ExtractOs.androidVolume.label=OS Drive (Android) ExtractOs.debianLinuxOs.label=Linux (Debian) @@ -50,6 +57,8 @@ ExtractOs.windowsVolume.label=OS Drive (Windows) ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog) ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog) ExtractOS_progressMessage=Checking for OS +# {0} - sub module name +ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files ExtractPrefetch_module_name=Windows Prefetch Extractor ExtractRecycleBin_module_name=Recycle Bin ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files. 
@@ -122,10 +131,7 @@ ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registr ExtractRegistry.parentModuleName.noSpace=RecentActivity ExtractRegistry.programName=RegRipper ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1} -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile2={0}: Failed to analyze registry file -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile3={0}: Failed to analyze registry file -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile4={0}: Failed to analyze registry file +ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1} Firefox.moduleName=FireFox Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox. Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found. @@ -211,6 +217,7 @@ Recently_Used_Artifacts_Winrar=Recently opened according to WinRAR MRU Registry_System_Bam=Recently Executed according to Background Activity Moderator (BAM) RegRipperFullNotFound=Full version RegRipper executable not found. RegRipperNotFound=Autopsy RegRipper executable not found. +# {0} - file name SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}. 
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine SearchEngineURLQueryAnalyzer.engineName.none=NONE diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle_ja.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle_ja.properties index 5ee2ad12aa..c088d66978 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle_ja.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle_ja.properties @@ -40,10 +40,7 @@ ExtractPrefetch_module_name=Windows Prefetch Extractor ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb - {1}\u3092\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb{1}\u3092\u89e3\u6790\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f ExtractRegistry.analyzeRegFiles.failedParsingResults={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb\u7d50\u679c\u306e\u30d1\u30fc\u30b9\u306b\u5931\u6557\u3057\u307e\u3057\u305f{1} -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb\u306e\u89e3\u6790\u306b\u5931\u6557\u3057\u307e\u3057\u305f -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile2={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb\u306e\u89e3\u6790\u306b\u5931\u6557\u3057\u307e\u3057\u305f -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile3={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb\u306e\u89e3\u6790\u306b\u5931\u6557\u3057\u307e\u3057\u305f -ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile4={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb\u306e\u89e3\u6790\u306b\u5931\u6557\u3057\u307e\u3057\u305f 
+ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}\:\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb\u306e\u89e3\u6790\u306b\u5931\u6557\u3057\u307e\u3057\u305f {1} ExtractRegistry.findRegFiles.errMsg.errReadingFile=\u30ec\u30b8\u30b9\u30c8\u30ea\u30d5\u30a1\u30a4\u30eb\uff1a{0}\u3092\u53d6\u5f97\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f ExtractRegistry.moduleName.text=\u30ec\u30b8\u30b9\u30c8\u30ea ExtractRegistry.parentModuleName.noSpace=\u6700\u8fd1\u306e\u30a2\u30af\u30c6\u30a3\u30d3\u30c6\u30a3 diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index bafb4eb27c..aeb2b27954 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2020 Basis Technology Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -162,24 +162,24 @@ final class ExtractEdge extends Extract { final String esedumper = getPathForESEDumper(); if (esedumper == null) { - this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_unableFindESEViewer()); LOG.log(Level.SEVERE, "Error finding ESEDatabaseViewer program"); //NON-NLS + this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_unableFindESEViewer()); return; //If we cannot find the ESEDatabaseView we cannot proceed } try { this.processWebCacheDbFile(esedumper, webCacheFiles, progressBar); } catch (IOException | TskCoreException ex) { + LOG.log(Level.SEVERE, "Error processing 'WebCacheV01.dat' files for Microsoft Edge", ex); // NON-NLS this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); - LOG.log(Level.SEVERE, "Error returned from processWebCacheDbFile", ex); // NON-NLS } progressBar.progress(Bundle.Progress_Message_Edge_Bookmarks()); try { this.processSpartanDbFile(esedumper, spartanFiles); } catch (IOException | TskCoreException ex) { + LOG.log(Level.SEVERE, "Error processing 'spartan.edb' files for Microsoft Edge", ex); // NON-NLS this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_spartanFail()); - LOG.log(Level.SEVERE, "Error returned from processSpartanDbFile", ex); // NON-NLS } } @@ -584,7 +584,7 @@ final class ExtractEdge extends Extract { processBuilder.redirectOutput(outputFilePath.toFile()); processBuilder.redirectError(errFilePath.toFile()); - ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); + ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context, true)); } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 2b97aa44e7..d128c09dc7 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -2,7 +2,7 @@ * * Autopsy Forensic 
Browser * - * Copyright 2011-2019 Basis Technology Corp. + * Copyright 2012-2020 Basis Technology Corp. * * Copyright 2012 42six Solutions. * Contact: aebadirad 42six com @@ -46,7 +46,6 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -56,7 +55,9 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; -import org.sleuthkit.datamodel.*; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.ReadContentInputStream; +import org.sleuthkit.datamodel.TskCoreException; /** * Extracts activity from Internet Explorer browser, as well as recent documents @@ -65,7 +66,6 @@ import org.sleuthkit.datamodel.*; class ExtractIE extends Extract { private static final Logger logger = Logger.getLogger(ExtractIE.class.getName()); - private final IngestServices services = IngestServices.getInstance(); private final String moduleTempResultsDir; private String PASCO_LIB_PATH; private final String JAVA_PATH; @@ -387,6 +387,10 @@ class ExtractIE extends Extract { * * @return false on error */ + @Messages({ + "# {0} - sub module name", + "ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history", + }) private boolean executePasco(String indexFilePath, String outputFileName) { boolean success = true; try { @@ -413,11 +417,12 @@ class ExtractIE extends Extract { * contains a lot of useful data and only the last entry is * corrupted. 
*/ - ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); + ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context, true)); // @@@ Investigate use of history versus cache as type. } catch (IOException ex) { + logger.log(Level.SEVERE, "Error executing Pasco to process Internet Explorer web history", ex); //NON-NLS + addErrorMessage(Bundle.ExtractIE_executePasco_errMsg_errorRunningPasco(getName())); success = false; - logger.log(Level.SEVERE, "Unable to execute Pasco to process Internet Explorer web history.", ex); //NON-NLS } return success; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java index 6b20c99e8a..4771b76223 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java @@ -78,7 +78,9 @@ final class ExtractPrefetch extends Extract { private static final String PREFETCH_DIR_NAME = "prefetch"; //NON-NLS @Messages({ - "ExtractPrefetch_module_name=Windows Prefetch Extractor" + "ExtractPrefetch_module_name=Windows Prefetch Extractor", + "# {0} - sub module name", + "ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files" }) ExtractPrefetch() { this.moduleName = Bundle.ExtractPrefetch_module_name(); @@ -96,7 +98,6 @@ final class ExtractPrefetch extends Extract { if (!dirMade) { logger.log(Level.SEVERE, "Error creating directory to store prefetch output database"); //NON-NLS return; //If we cannot create the directory then we need to exit - } } @@ -118,7 +119,8 @@ final class ExtractPrefetch extends Extract { parsePrefetchFiles(prefetchDumper, tempDirPath, modOutFile, modOutPath); createAppExecArtifacts(modOutFile, dataSource); } catch (IOException ex) { - logger.log(Level.WARNING, "Error runing parse_prefetch or creating artifacts.", ex); 
//NON-NLS + logger.log(Level.SEVERE, "Error parsing prefetch files", ex); //NON-NLS + addErrorMessage(Bundle.ExtractPrefetch_errMsg_prefetchParsingFailed(Bundle.ExtractPrefetch_module_name())); } } @@ -127,7 +129,6 @@ final class ExtractPrefetch extends Extract { * that the prefetch files only come from the /Windows/Prefetch directory * * @param dataSource - datasource to search for prefetch files - * */ void extractPrefetchFiles(Content dataSource) { List pFiles; @@ -184,7 +185,7 @@ final class ExtractPrefetch extends Extract { processBuilder.redirectOutput(outputFilePath.toFile()); processBuilder.redirectError(errFilePath.toFile()); - ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); + ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context, true)); } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 96175408b5..7ff07455cc 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -443,8 +443,8 @@ class ExtractRegistry extends Extract { try { scanErrorLogs(errFilePath); } catch (IOException ex) { - logger.log(Level.SEVERE, "Unable to run RegRipper", ex); //NON-NLS - this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName())); + logger.log(Level.SEVERE, String.format("Unable to run RegRipper on %s", regFilePath), ex); //NON-NLS + this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName(), regFilePath)); } } return regOutputFiles; @@ -480,10 +480,10 @@ class ExtractRegistry extends Extract { processBuilder.directory(regRipperHomeDir.toFile()); // RegRipper 2.8 has to be run from its own directory 
processBuilder.redirectOutput(new File(outputFile)); processBuilder.redirectError(new File(errFile)); - ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); + ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context, true)); } catch (IOException ex) { - logger.log(Level.SEVERE, "Unable to run RegRipper", ex); //NON-NLS - this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName())); + logger.log(Level.SEVERE, String.format("Error running RegRipper on %s", hiveFilePath), ex); //NON-NLS + this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName(), hiveFilePath)); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java index f4665416ab..fb85593953 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java @@ -138,8 +138,8 @@ final class ExtractSru extends Extract { createNetUsageArtifacts(modOutFile, sruAbstractFile); createAppUsageArtifacts(modOutFile, sruAbstractFile); } catch (IOException ex) { + logger.log(Level.SEVERE, "Error processing SRUDB.dat file", ex); //NON-NLS= this.addErrorMessage(Bundle.ExtractSru_process_error_executing_export_srudb_program()); - logger.log(Level.SEVERE, "SRUDB.dat file not found"); //NON-NLS } } @@ -256,7 +256,7 @@ final class ExtractSru extends Extract { processBuilder.redirectOutput(outputFilePath.toFile()); processBuilder.redirectError(errFilePath.toFile()); - ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); + ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context, true)); } private String getPathForSruDumper() { diff --git 
a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml index bbc4d12f61..4cedc25d2b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SEUQAMappings.xml @@ -12,7 +12,7 @@ Each splitToken contains a single mapping of a raw URL substring to its regex eq SearchEngine: engine: The engines basic name - domainSubstring: The domain of the URL such that it can uniquely be identified as given engine. + domainSubstring: The domain of the URL such that it can be identified as given engine. Should not have leading or trailing '.' splitToken: plainToken: The string in the URL that is immediately followed by the actual query. @@ -25,30 +25,30 @@ splitToken: --> - + - + - + - + - + - + - + @@ -59,28 +59,28 @@ splitToken: - + - + - + - + - + @@ -92,22 +92,22 @@ splitToken: - + - + - + - + - + @@ -116,28 +116,28 @@ splitToken: - + - + - + - + - + - + - + - + diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index eeb6e5a987..0dde950a05 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -22,10 +22,15 @@ import java.io.File; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; +import java.util.Arrays; import java.util.ArrayList; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.logging.Level; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.Set; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import 
javax.xml.parsers.ParserConfigurationException; @@ -54,7 +59,7 @@ import org.xml.sax.SAXException; * artifacts, and extracting search text from them. * * - * To add search engines, edit SearchEngines.xml under RecentActivity + * To add search engines, edit SEUQAMappings.xml under RecentActivity * */ @NbBundle.Messages({ @@ -107,11 +112,13 @@ class SearchEngineURLQueryAnalyzer extends Extract { private final String engineName; private final String domainSubstring; private final List keyPairs; + private final Pattern domainRegexPattern; private int count; SearchEngine(String engineName, String domainSubstring, List keyPairs) { this.engineName = engineName; this.domainSubstring = domainSubstring; + domainRegexPattern = Pattern.compile("^(.*[./])?" + domainSubstring + "([./].*)?$"); this.keyPairs = keyPairs; count = 0; } @@ -127,6 +134,10 @@ class SearchEngineURLQueryAnalyzer extends Extract { String getDomainSubstring() { return domainSubstring; } + + Pattern getDomainRegexPattern() { + return domainRegexPattern; + } int getTotal() { return count; @@ -202,20 +213,21 @@ class SearchEngineURLQueryAnalyzer extends Extract { * * @param domain domain as part of the URL * - * @return supported search engine the domain belongs to or null if no match - * is found + * @return supported search engine(s) the domain belongs to (list may be empty) * */ - private static SearchEngineURLQueryAnalyzer.SearchEngine getSearchEngineFromUrl(String domain) { + private static Collection getSearchEngineFromUrl(String domain) { + List supportedEngines = new ArrayList<>(); if (engines == null) { - return null; + return supportedEngines; } for (SearchEngine engine : engines) { - if (domain.contains(engine.getDomainSubstring())) { - return engine; + Matcher matcher = engine.getDomainRegexPattern().matcher(domain); + if (matcher.matches()) { + supportedEngines.add(engine); } } - return null; + return supportedEngines; } /** @@ -294,8 +306,9 @@ class SearchEngineURLQueryAnalyzer extends Extract { 
int totalQueries = 0; try { //from blackboard_artifacts - Collection<BlackboardArtifact> listArtifacts = currentCase.getSleuthkitCase().getMatchingArtifacts("WHERE (blackboard_artifacts.artifact_type_id = '" + ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getTypeID() //NON-NLS - + "' OR blackboard_artifacts.artifact_type_id = '" + ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID() + "') "); //List of every 'web_history' and 'bookmark' artifact NON-NLS + Collection<BlackboardArtifact> listArtifacts = currentCase.getSleuthkitCase().getBlackboard().getArtifacts( + Arrays.asList(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_BOOKMARK), new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_HISTORY)), + Arrays.asList(dataSource.getId())); logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS for (BlackboardArtifact artifact : listArtifacts) { @@ -304,51 +317,54 @@ class SearchEngineURLQueryAnalyzer extends Extract { } //initializing default attributes - String query = ""; String searchEngineDomain = ""; String browser = ""; long last_accessed = -1; - long fileId = artifact.getObjectID(); - boolean isFromSource = tskCase.isFileFromSource(dataSource, fileId); - if (!isFromSource) { - //File was from a different dataSource. Skipping. 
- continue; - } - - AbstractFile file = tskCase.getAbstractFileById(fileId); + AbstractFile file = tskCase.getAbstractFileById(artifact.getObjectID()); if (file == null) { continue; } - SearchEngineURLQueryAnalyzer.SearchEngine se = null; - //from blackboard_attributes - Collection<BlackboardAttribute> listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("WHERE artifact_id = " + artifact.getArtifactID()); //NON-NLS - - for (BlackboardAttribute attribute : listAttributes) { - if (attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID()) { - final String urlString = attribute.getValueString(); - se = getSearchEngineFromUrl(urlString); - if (se == null) { - break; - } - - query = extractSearchEngineQuery(se, attribute.getValueString()); - if (query.equals("")) //False positive match, artifact was not a query. NON-NLS - { - break; - } - - } else if (attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID()) { - browser = attribute.getValueString(); - } else if (attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID()) { - searchEngineDomain = attribute.getValueString(); - } else if (attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) { - last_accessed = attribute.getValueLong(); + // Try search engines on the URL to see if any produce a search string + Set<String> searchQueries = new HashSet<>(); + BlackboardAttribute urlAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL)); + if (urlAttr == null) { + continue; + } + + final String urlString = urlAttr.getValueString(); + Collection<SearchEngineURLQueryAnalyzer.SearchEngine> possibleSearchEngines = getSearchEngineFromUrl(urlString); + for (SearchEngineURLQueryAnalyzer.SearchEngine se : possibleSearchEngines) { + String query = extractSearchEngineQuery(se, urlString); + // If we have a non-empty query string, add it to the list + if ( 
!query.equals("")) { + searchQueries.add(query); + se.increment(); } } + + // If we didn't extract any search queries, go on to the next artifact + if (searchQueries.isEmpty()) { + continue; + } + + // Extract the rest of the fields needed for the web search artifact + BlackboardAttribute browserAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME)); + if (browserAttr != null) { + browser = browserAttr.getValueString(); + } + BlackboardAttribute domainAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN)); + if (domainAttr != null) { + searchEngineDomain = domainAttr.getValueString(); + } + BlackboardAttribute lastAccessAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED)); + if (lastAccessAttr != null) { + last_accessed = lastAccessAttr.getValueLong(); + } - if (se != null && !query.equals("")) { //NON-NLS + // Make an artifact for each distinct query + for (String query : searchQueries) { // If date doesn't exist, change to 0 (instead of 1969) if (last_accessed == -1) { last_accessed = 0; @@ -367,7 +383,6 @@ class SearchEngineURLQueryAnalyzer extends Extract { NbBundle.getMessage(this.getClass(), "SearchEngineURLQueryAnalyzer.parentModuleName"), last_accessed)); postArtifact(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY, file, bbattributes)); - se.increment(); ++totalQueries; } }