diff --git a/.gitignore b/.gitignore index 9949c8b6fc..43671307d1 100755 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ /*/build/ */nbproject/private/* /nbproject/private/* +/apidiff_output/ /Core/release/ /Core/src/org/sleuthkit/autopsy/coreutils/Version.properties diff --git a/Core/build.xml b/Core/build.xml index e8966c2508..44b627fb9d 100644 --- a/Core/build.xml +++ b/Core/build.xml @@ -31,6 +31,17 @@ + + + + + + @@ -89,6 +100,11 @@ + + + + + diff --git a/Core/nbproject/project.properties b/Core/nbproject/project.properties index 7dc5c4b32a..fceb64d406 100644 --- a/Core/nbproject/project.properties +++ b/Core/nbproject/project.properties @@ -83,7 +83,6 @@ file.reference.jgraphx-4.1.0.jar=release\\modules\\ext\\jgraphx-4.1.0.jar file.reference.jline-0.9.94.jar=release\\modules\\ext\\jline-0.9.94.jar file.reference.jsoup-1.10.3.jar=release\\modules\\ext\\jsoup-1.10.3.jar file.reference.jsr305-3.0.2.jar=release\\modules\\ext\\jsr305-3.0.2.jar -file.reference.junit-3.8.1.jar=release\\modules\\ext\\junit-3.8.1.jar file.reference.jutf7-1.0.0.jar=release\\modules\\ext\\jutf7-1.0.0.jar file.reference.jxmapviewer2-2.4.jar=release\\modules\\ext\\jxmapviewer2-2.4.jar file.reference.jython-standalone-2.7.0.jar=release\\modules\\ext\\jython-standalone-2.7.0.jar diff --git a/Core/nbproject/project.xml b/Core/nbproject/project.xml index d59036a1f2..ab23992798 100644 --- a/Core/nbproject/project.xml +++ b/Core/nbproject/project.xml @@ -737,10 +737,6 @@ ext/jai_imageio-1.1.jar release\modules\ext\jai_imageio-1.1.jar - - ext/junit-3.8.1.jar - release\modules\ext\junit-3.8.1.jar - ext/curator-client-2.8.0.jar release\modules\ext\curator-client-2.8.0.jar diff --git a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties index a2feedc54f..4ca72069dc 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties @@ -45,3 +45,4 @@ 
OpenPythonModulesFolderAction.actionName.text=Python Plugins OpenPythonModulesFolderAction.errorMsg.folderNotFound=Python plugins folder not found: {0} CTL_OpenPythonModulesFolderAction=Python Plugins GetTagNameAndCommentDialog.tagCombo.toolTipText=Select tag to use +CTL_ExitAction=Exit \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED index 507e079cad..5c9a0ea3ac 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED @@ -96,3 +96,4 @@ OpenPythonModulesFolderAction.actionName.text=Python Plugins OpenPythonModulesFolderAction.errorMsg.folderNotFound=Python plugins folder not found: {0} CTL_OpenPythonModulesFolderAction=Python Plugins GetTagNameAndCommentDialog.tagCombo.toolTipText=Select tag to use +CTL_ExitAction=Exit diff --git a/Core/src/org/sleuthkit/autopsy/actions/ExitAction.java b/Core/src/org/sleuthkit/autopsy/actions/ExitAction.java index 7240afca92..31162cc67b 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/ExitAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/ExitAction.java @@ -40,7 +40,7 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * The action associated with the Case/Exit menu item. It closes the current * case, if any, and shuts down the application. 
*/ -@ActionRegistration(displayName = "Exit", iconInMenu = true) +@ActionRegistration(displayName = "#CTL_ExitAction", iconInMenu = true) @ActionReference(path = "Menu/Case", position = 1000, separatorBefore = 999) @ActionID(id = "org.sleuthkit.autopsy.casemodule.ExitAction", category = "Case") final public class ExitAction implements ActionListener { diff --git a/Core/src/org/sleuthkit/autopsy/actions/ThreadDumpAction.java b/Core/src/org/sleuthkit/autopsy/actions/ThreadDumpAction.java index d74511996c..dad577e8cb 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/ThreadDumpAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/ThreadDumpAction.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,18 +24,10 @@ import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; -import java.lang.management.ManagementFactory; -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; import java.nio.file.Path; import java.nio.file.Paths; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Arrays; -import java.util.Date; import java.util.concurrent.ExecutionException; import java.util.logging.Level; -import java.util.stream.Collectors; import javax.swing.SwingWorker; import org.openide.awt.ActionID; import org.openide.awt.ActionReference; @@ -46,6 +38,8 @@ import org.openide.util.actions.CallableSystemAction; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.coreutils.ThreadUtils; +import org.sleuthkit.autopsy.coreutils.TimeStampUtils; /** * Action class for the Thread Dump help menu item. 
If there is no case open the @@ -63,8 +57,6 @@ public final class ThreadDumpAction extends CallableSystemAction implements Acti private static final long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(ThreadDumpAction.class.getName()); - private static final DateFormat DATE_FORMAT = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss-SSSS"); - @Override public void performAction() { (new ThreadDumper()).run(); @@ -114,26 +106,13 @@ public final class ThreadDumpAction extends CallableSystemAction implements Acti * @throws IOException */ private File createThreadDump() throws IOException { + + // generate thread dump + String threadDump = ThreadUtils.generateThreadDump(); + File dumpFile = createFilePath().toFile(); try (BufferedWriter writer = new BufferedWriter(new FileWriter(dumpFile, true))) { - ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); - ThreadInfo[] threadInfos = threadMXBean.getThreadInfo(threadMXBean.getAllThreadIds(), 100); - for (ThreadInfo threadInfo : threadInfos) { - writer.write(threadInfo.toString()); - writer.write("\n"); - } - - long[] deadlockThreadIds = threadMXBean.findDeadlockedThreads(); - if (deadlockThreadIds != null) { - writer.write("-------------------List of Deadlocked Thread IDs ---------------------"); - String idsList = (Arrays - .stream(deadlockThreadIds) - .boxed() - .collect(Collectors.toList())) - .stream().map(n -> String.valueOf(n)) - .collect(Collectors.joining("-", "{", "}")); - writer.write(idsList); - } + writer.write(threadDump); } return dumpFile; @@ -145,7 +124,7 @@ public final class ThreadDumpAction extends CallableSystemAction implements Acti * @return Path for dump file. 
*/ private Path createFilePath() { - String fileName = "ThreadDump_" + DATE_FORMAT.format(new Date()) + ".txt"; + String fileName = "ThreadDump_" + TimeStampUtils.createTimeStamp() + ".txt"; if (Case.isCaseOpen()) { return Paths.get(Case.getCurrentCase().getLogDirectoryPath(), fileName); } diff --git a/Core/src/org/sleuthkit/autopsy/apputils/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/apputils/Bundle.properties-MERGED new file mode 100644 index 0000000000..de7c28c948 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/apputils/Bundle.properties-MERGED @@ -0,0 +1,4 @@ +CTL_ResetWindowsAction=Reset Windows +ResetWindowAction.caseCloseFailure.text=Unable to close the current case, the software will restart and the windows locations will reset the next time the software is closed. +ResetWindowAction.caseSaveMetadata.text=Unable to save current case path, the software will restart and the windows locations will reset but the current case will not be opened upon restart. +ResetWindowAction.confirm.text=In order to perform the resetting of window locations the software will close and restart. If a case is currently open, it will be closed. If ingest or a search is currently running, it will be terminated. Are you sure you want to restart the software to reset all window locations? diff --git a/Core/src/org/sleuthkit/autopsy/apputils/ResetWindowsAction.java b/Core/src/org/sleuthkit/autopsy/apputils/ResetWindowsAction.java new file mode 100644 index 0000000000..25bacd3761 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/apputils/ResetWindowsAction.java @@ -0,0 +1,141 @@ +/* + * Autopsy + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.apputils; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.logging.Level; +import javax.swing.SwingUtilities; +import org.apache.commons.io.FileUtils; +import org.openide.LifecycleManager; +import org.openide.awt.ActionID; +import org.openide.awt.ActionReference; +import org.openide.awt.ActionReferences; +import org.openide.awt.ActionRegistration; +import org.openide.util.HelpCtx; +import org.openide.util.NbBundle; +import org.openide.util.actions.CallableSystemAction; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.CaseActionException; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; + +/** + * Class to open the Discovery dialog. Allows the user to run searches and see + * results in the DiscoveryTopComponent. 
+ */ +@ActionID(category = "Tools", id = "org.sleuthkit.autopsy.apputils.ResetWindowsAction") +@ActionReferences(value = { + @ActionReference(path = "Menu/Window", position = 205)}) +@ActionRegistration(displayName = "#CTL_ResetWindowsAction", lazy = false) +@NbBundle.Messages({"CTL_ResetWindowsAction=Reset Windows"}) +public final class ResetWindowsAction extends CallableSystemAction { + + private static final String DISPLAY_NAME = Bundle.CTL_ResetWindowsAction(); + private static final long serialVersionUID = 1L; + private final static Logger logger = Logger.getLogger(ResetWindowsAction.class.getName()); + private final static String WINDOWS2LOCAL = "Windows2Local"; + private final static String CASE_TO_REOPEN_FILE = "caseToOpen.txt"; + + @Override + public boolean isEnabled() { + return true; + } + + @NbBundle.Messages({"ResetWindowAction.confirm.text=In order to perform the resetting of window locations the software will close and restart. " + + "If a case is currently open, it will be closed. If ingest or a search is currently running, it will be terminated. 
" + + "Are you sure you want to restart the software to reset all window locations?", + "ResetWindowAction.caseCloseFailure.text=Unable to close the current case, " + + "the software will restart and the windows locations will reset the next time the software is closed.", + "ResetWindowAction.caseSaveMetadata.text=Unable to save current case path, " + + "the software will restart and the windows locations will reset but the current case will not be opened upon restart."}) + + @Override + public void performAction() { + SwingUtilities.invokeLater(() -> { + boolean response = MessageNotifyUtil.Message.confirm(Bundle.ResetWindowAction_confirm_text()); + if (response) { + //adding the shutdown hook, closing the current case, and marking for restart can be re-ordered if slightly different behavior is desired + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + try { + FileUtils.deleteDirectory(new File(PlatformUtil.getUserConfigDirectory() + File.separator + WINDOWS2LOCAL)); + } catch (IOException ex) { + //While we would like the user to be aware of this in the unlikely event that the directory can not be deleted + //Because our deletion is being attempted in a shutdown hook I don't know that we can pop up UI elements during the shutdown proces + logger.log(Level.SEVERE, "Unable to delete config directory, window locations will not be reset. To manually reset the windows please delete the following directory while the software is closed. 
" + PlatformUtil.getUserConfigDirectory() + File.separator + "Windows2Local", ex); + } + } + }); + try { + if (Case.isCaseOpen()) { + String caseMetadataFilePath = Case.getCurrentCase().getMetadata().getFilePath().toString(); + File caseToOpenFile = new File(ResetWindowsAction.getCaseToReopenFilePath()); + Charset encoding = null; //prevents writeStringToFile from having ambiguous arguments + FileUtils.writeStringToFile(caseToOpenFile, caseMetadataFilePath, encoding); + Case.closeCurrentCase(); + } + // The method markForRestart can not be undone once it is called. + LifecycleManager.getDefault().markForRestart(); + //we need to call exit last + LifecycleManager.getDefault().exit(); + } catch (CaseActionException ex) { + logger.log(Level.WARNING, Bundle.ResetWindowAction_caseCloseFailure_text(), ex); + MessageNotifyUtil.Message.show(Bundle.ResetWindowAction_caseCloseFailure_text(), MessageNotifyUtil.MessageType.ERROR); + } catch (IOException ex) { + logger.log(Level.WARNING, Bundle.ResetWindowAction_caseSaveMetadata_text(), ex); + MessageNotifyUtil.Message.show(Bundle.ResetWindowAction_caseSaveMetadata_text(), MessageNotifyUtil.MessageType.ERROR); + } + } + }); + } + + public static String getCaseToReopenFilePath(){ + return PlatformUtil.getUserConfigDirectory() + File.separator + CASE_TO_REOPEN_FILE; + } + + /** + * Set this action to be enabled/disabled + * + * @param value whether to enable this action or not + */ + @Override + + public void setEnabled(boolean value) { + super.setEnabled(value); + } + + @Override + public String getName() { + return DISPLAY_NAME; + } + + @Override + public HelpCtx getHelpCtx() { + return HelpCtx.DEFAULT_HELP; + } + + @Override + public boolean asynchronous() { + return false; // run on edt + } +} diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java index 6106914e5b..f0753d70bd 100644 --- 
a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java @@ -290,7 +290,7 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel { * * * @param errorString the error string to be displayed - * @param critical true if this is a critical error + * @param critical true if this is a critical error */ void addErrors(String errorString, boolean critical) { getComponent().showErrors(errorString, critical); @@ -325,10 +325,10 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel { * Starts the Data source processing by kicking off the selected * DataSourceProcessor * - * @param dsp The data source processor providing configuration for how to - * process the specific data source type. + * @param dsp The data source processor providing configuration for + * how to process the specific data source type. * @param selectedHost The host to which this data source belongs or null - * for a default host. + * for a default host. */ void startDataSourceProcessing(DataSourceProcessor dsp, Host selectedHost) { if (dsProcessor == null) { //this can only be run once @@ -336,45 +336,44 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel { newContents.clear(); cleanupTask = null; dsProcessor = dsp; - - // Add a cleanup task to interrupt the background process if the - // wizard exits while the background process is running. 
- cleanupTask = addImageAction.new CleanupTask() { - @Override - void cleanup() throws Exception { - WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - cancelDataSourceProcessing(dataSourceId); - cancelled = true; - WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - }; - - cleanupTask.enable(); - new Thread(() -> { + // Add a cleanup task to interrupt the background process if the + // wizard exits while the background process is running. + cleanupTask = addImageAction.new CleanupTask() { + @Override + void cleanup() throws Exception { + WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + cancelDataSourceProcessing(dataSourceId); + cancelled = true; + WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); + } + }; + + cleanupTask.enable(); + try { Case.getCurrentCaseThrows().notifyAddingDataSource(dataSourceId); } catch (NoCurrentCaseException ex) { Logger.getLogger(AddImageWizardAddingProgressVisual.class.getName()).log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS } - }).start(); - DataSourceProcessorCallback cbObj = new DataSourceProcessorCallback() { - @Override - public void doneEDT(DataSourceProcessorCallback.DataSourceProcessorResult result, List errList, List contents) { - dataSourceProcessorDone(dataSourceId, result, errList, contents); + + DataSourceProcessorCallback cbObj = new DataSourceProcessorCallback() { + @Override + public void doneEDT(DataSourceProcessorCallback.DataSourceProcessorResult result, List errList, List contents) { + dataSourceProcessorDone(dataSourceId, result, errList, contents); + } + }; + + // Kick off the DSProcessor + if (dsProcessor.supportsIngestStream()) { + // Set readyToIngest to false to prevent the wizard from starting ingest a second time. 
+ readyToIngest = false; + dsProcessor.runWithIngestStream(selectedHost, ingestJobSettings, getDSPProgressMonitorImpl(), cbObj); + } else { + dsProcessor.run(selectedHost, getDSPProgressMonitorImpl(), cbObj); } - }; - + }).start(); setStateStarted(); - - // Kick off the DSProcessor - if (dsProcessor.supportsIngestStream()) { - // Set readyToIngest to false to prevent the wizard from starting ingest a second time. - readyToIngest = false; - dsProcessor.runWithIngestStream(selectedHost, ingestJobSettings, getDSPProgressMonitorImpl(), cbObj); - } else { - dsProcessor.run(selectedHost, getDSPProgressMonitorImpl(), cbObj); - } } } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardSelectHostVisual.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardSelectHostVisual.java index 9197b2fb42..07eaf84104 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardSelectHostVisual.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardSelectHostVisual.java @@ -79,7 +79,7 @@ class AddImageWizardSelectHostVisual extends javax.swing.JPanel { @Override public int hashCode() { int hash = 7; - hash = 41 * hash + Objects.hashCode(this.host == null ? 0 : this.host.getId()); + hash = 41 * hash + Objects.hashCode(this.host == null ? 0 : this.host.getHostId()); return hash; } @@ -96,8 +96,8 @@ class AddImageWizardSelectHostVisual extends javax.swing.JPanel { } final HostListItem other = (HostListItem) obj; if (!Objects.equals( - this.host == null ? 0 : this.host.getId(), - other.host == null ? 0 : other.host.getId())) { + this.host == null ? 0 : this.host.getHostId(), + other.host == null ? 
0 : other.host.getHostId())) { return false; } @@ -166,7 +166,7 @@ class AddImageWizardSelectHostVisual extends javax.swing.JPanel { if (specifyNewHostRadio.isSelected() && StringUtils.isNotEmpty(specifyNewHostTextField.getText())) { String newHostName = specifyNewHostTextField.getText(); try { - return Case.getCurrentCaseThrows().getSleuthkitCase().getHostManager().createHost(newHostName); + return Case.getCurrentCaseThrows().getSleuthkitCase().getHostManager().newHost(newHostName); } catch (NoCurrentCaseException | TskCoreException ex) { logger.log(Level.WARNING, String.format("Unable to create host '%s'.", newHostName), ex); return null; @@ -186,7 +186,7 @@ class AddImageWizardSelectHostVisual extends javax.swing.JPanel { */ private void loadHostData() { try { - Collection hosts = Case.getCurrentCaseThrows().getSleuthkitCase().getHostManager().getHosts(); + Collection hosts = Case.getCurrentCaseThrows().getSleuthkitCase().getHostManager().getAllHosts(); sanitizedHostSet = HostNameValidator.getSanitizedHostNames(hosts); Vector hostListItems = hosts.stream() diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties index f3dfe72020..1a6186b2c2 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties @@ -4,7 +4,6 @@ CTL_CaseCloseAct=Close Case CTL_CaseNewAction=New Case CTL_CaseDetailsAction=Case Details CTL_CaseDeleteAction=Delete Case -Menu/Case/OpenRecentCase=Open Recent Case CTL_CaseDeleteAction=Delete Case OpenIDE-Module-Name=Case NewCaseVisualPanel1.caseNameLabel.text_1=Case Name: diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED index f731913af5..528d3a5088 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED +++ 
b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED @@ -128,6 +128,7 @@ CTL_CaseCloseAct=Close Case CTL_CaseNewAction=New Case CTL_CaseDetailsAction=Case Details CTL_CaseDeleteAction=Delete Case +CTL_CaseDeleteAction=Delete Case CTL_CaseOpenAction=Open Case CTL_UnpackagePortableCaseAction=Unpack and Open Portable Case DeleteDataSourceAction.confirmationDialog.message=Are you sure you want to remove the selected data source from the case?\nNote that the case will be closed and re-opened during the removal. @@ -186,8 +187,6 @@ LogicalEvidenceFilePanel.pathValidation.getOpenCase.Error=Warning: Exception whi LogicalEvidenceFilePanel.validatePanel.nonL01Error.text=Only files with the .l01 file extension are supported here. LogicalFilesDspPanel.subTypeComboBox.l01FileOption.text=Logical evidence file (L01) LogicalFilesDspPanel.subTypeComboBox.localFilesOption.text=Local files and folders -Menu/Case/OpenRecentCase=Open Recent Case -CTL_CaseDeleteAction=Delete Case OpenIDE-Module-Name=Case NewCaseVisualPanel1.caseNameLabel.text_1=Case Name: NewCaseVisualPanel1.caseDirLabel.text=Base Directory: @@ -346,6 +345,12 @@ RecentCases.getName.text=Clear Recent Cases RecentItems.openRecentCase.msgDlg.text=Case {0} no longer exists. SelectDataSourceProcessorPanel.name.text=Select Data Source Type StartupWindow.title.text=Welcome +# {0} - autFilePath +StartupWindowProvider.openCase.cantOpen=Unable to open previously open case with metadata file: {0} +# {0} - reOpenFilePath +StartupWindowProvider.openCase.deleteOpenFailure=Unable to open or delete file containing path {0} to previously open case. The previous case will not be opened. 
+# {0} - autFilePath +StartupWindowProvider.openCase.noFile=Unable to open previously open case because metadata file not found at: {0} UnpackagePortableCaseDialog.title.text=Unpackage Portable Case UnpackagePortableCaseDialog.UnpackagePortableCaseDialog.extensions=Portable case package (.zip, .zip.001) UnpackagePortableCaseDialog.validatePaths.badExtension=File extension must be .zip or .zip.001 diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java index ffd226d6d3..d2233a93cb 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2012-2020 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -29,6 +29,7 @@ import java.awt.event.ActionListener; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.File; +import java.lang.reflect.InvocationTargetException; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.nio.file.Paths; @@ -62,6 +63,7 @@ import javax.annotation.concurrent.GuardedBy; import javax.annotation.concurrent.ThreadSafe; import javax.swing.JOptionPane; import javax.swing.SwingUtilities; +import org.apache.commons.lang3.StringUtils; import org.openide.util.Lookup; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; @@ -87,9 +89,10 @@ import org.sleuthkit.autopsy.casemodule.events.HostsChangedEvent; import org.sleuthkit.autopsy.casemodule.events.HostsRemovedEvent; import org.sleuthkit.autopsy.casemodule.events.OsAccountAddedEvent; import org.sleuthkit.autopsy.casemodule.events.OsAccountChangedEvent; +import org.sleuthkit.autopsy.casemodule.events.OsAccountDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.PersonsAddedEvent; import 
org.sleuthkit.autopsy.casemodule.events.PersonsChangedEvent; -import org.sleuthkit.autopsy.casemodule.events.PersonsRemovedEvent; +import org.sleuthkit.autopsy.casemodule.events.PersonsDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.ReportAddedEvent; import org.sleuthkit.autopsy.casemodule.multiusercases.CaseNodeData.CaseNodeDataException; import org.sleuthkit.autopsy.casemodule.multiusercases.CoordinationServiceUtils; @@ -114,6 +117,7 @@ import org.sleuthkit.autopsy.coreutils.ThreadUtils; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; import org.sleuthkit.autopsy.coreutils.Version; import org.sleuthkit.autopsy.datamodel.hosts.OpenHostsAction; +import org.sleuthkit.autopsy.directorytree.DirectoryTreeTopComponent; import org.sleuthkit.autopsy.events.AutopsyEvent; import org.sleuthkit.autopsy.events.AutopsyEventException; import org.sleuthkit.autopsy.events.AutopsyEventPublisher; @@ -124,6 +128,7 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService; import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException; +import org.sleuthkit.autopsy.machinesettings.UserMachinePreferences; import org.sleuthkit.autopsy.progress.LoggingProgressIndicator; import org.sleuthkit.autopsy.progress.ModalDialogProgressIndicator; import org.sleuthkit.autopsy.progress.ProgressIndicator; @@ -138,24 +143,16 @@ import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.FileSystem; import org.sleuthkit.datamodel.Host; -import org.sleuthkit.datamodel.HostManager.HostsCreationEvent; -import org.sleuthkit.datamodel.HostManager.HostsUpdateEvent; -import org.sleuthkit.datamodel.HostManager.HostsDeletionEvent; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.OsAccount; -import org.sleuthkit.datamodel.OsAccountManager; -import 
org.sleuthkit.datamodel.OsAccountManager.OsAccountsCreationEvent; -import org.sleuthkit.datamodel.OsAccountManager.OsAccountsUpdateEvent; import org.sleuthkit.datamodel.Person; -import org.sleuthkit.datamodel.PersonManager.PersonsCreationEvent; -import org.sleuthkit.datamodel.PersonManager.PersonsUpdateEvent; -import org.sleuthkit.datamodel.PersonManager.PersonsDeletionEvent; import org.sleuthkit.datamodel.Report; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.SleuthkitCaseAdminUtil; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskDataException; +import org.sleuthkit.datamodel.TskEvent; import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException; /** @@ -163,9 +160,10 @@ import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException; */ public class Case { - private static final String CASE_TEMP_DIR = Case.class.getSimpleName(); private static final int CASE_LOCK_TIMEOUT_MINS = 1; private static final int CASE_RESOURCES_LOCK_TIMEOUT_HOURS = 1; + private static final String APP_NAME = UserPreferences.getAppName(); + private static final String TEMP_FOLDER = "Temp"; private static final String SINGLE_USER_CASE_DB_NAME = "autopsy.db"; private static final String EVENT_CHANNEL_NAME = "%s-Case-Events"; //NON-NLS private static final String CACHE_FOLDER = "Cache"; //NON-NLS @@ -438,41 +436,38 @@ public class Case { */ OS_ACCOUNT_ADDED, /** - * OSAccount associated with the current case has changed. - * Call getOsAccount to get the changed account; + * OSAccount associated with the current case has changed. Call + * getOsAccount to get the changed account; */ OS_ACCOUNT_CHANGED, - + /** + * OSAccount associated with the current case has been deleted. + */ + OS_ACCOUNT_REMOVED, /** * Hosts associated with the current case added. */ HOSTS_ADDED, - /** - * Hosts associated with the current case has changed. 
+ * Hosts associated with the current case has changed. */ HOSTS_CHANGED, - /** - * Hosts associated with the current case has been deleted. + * Hosts associated with the current case has been deleted. */ HOSTS_DELETED, - /** * Persons associated with the current case added. */ PERSONS_ADDED, - /** - * Persons associated with the current case has changed. + * Persons associated with the current case has changed. */ PERSONS_CHANGED, - /** - * Persons associated with the current case has been deleted. + * Persons associated with the current case has been deleted. */ - PERSONS_DELETED - ; + PERSONS_DELETED; }; /** @@ -505,90 +500,97 @@ public class Case { event.getArtifacts(artifactType))); } } - - @Subscribe - public void publishOsAccountAddedEvent(OsAccountsCreationEvent event) { - for(OsAccount account: event.getOsAcounts()) { + + @Subscribe + public void publishOsAccountAddedEvent(TskEvent.OsAccountsAddedTskEvent event) { + for (OsAccount account : event.getOsAcounts()) { eventPublisher.publish(new OsAccountAddedEvent(account)); } } - - @Subscribe - public void publishOsAccountChangedEvent(OsAccountsUpdateEvent event) { - for(OsAccount account: event.getOsAcounts()) { + + @Subscribe + public void publishOsAccountChangedEvent(TskEvent.OsAccountsChangedTskEvent event) { + for (OsAccount account : event.getOsAcounts()) { eventPublisher.publish(new OsAccountChangedEvent(account)); } } - + + @Subscribe + public void publishOsAccountDeletedEvent(TskEvent.OsAccountsDeletedTskEvent event) { + for (Long accountId : event.getOsAcountObjectIds()) { + eventPublisher.publish(new OsAccountDeletedEvent(accountId)); + } + } + /** - * Publishes an autopsy event from the sleuthkit HostCreationEvent + * Publishes an autopsy event from the sleuthkit HostAddedEvent * indicating that hosts have been created. - * + * * @param event The sleuthkit event for the creation of hosts. 
*/ - @Subscribe - public void publishHostsAddedEvent(HostsCreationEvent event) { + @Subscribe + public void publishHostsAddedEvent(TskEvent.HostsAddedTskEvent event) { eventPublisher.publish(new HostsAddedEvent( event == null ? Collections.emptyList() : event.getHosts())); } - + /** - * Publishes an autopsy event from the sleuthkit HostUpdateEvent + * Publishes an autopsy event from the sleuthkit HostUpdateEvent * indicating that hosts have been updated. - * + * * @param event The sleuthkit event for the updating of hosts. - */ - @Subscribe - public void publishHostsChangedEvent(HostsUpdateEvent event) { + */ + @Subscribe + public void publishHostsChangedEvent(TskEvent.HostsChangedTskEvent event) { eventPublisher.publish(new HostsChangedEvent( event == null ? Collections.emptyList() : event.getHosts())); } - + /** - * Publishes an autopsy event from the sleuthkit HostDeletedEvent + * Publishes an autopsy event from the sleuthkit HostDeletedEvent * indicating that hosts have been deleted. - * + * * @param event The sleuthkit event for the deleting of hosts. - */ - @Subscribe - public void publishHostsDeletedEvent(HostsDeletionEvent event) { + */ + @Subscribe + public void publishHostsDeletedEvent(TskEvent.HostsDeletedTskEvent event) { eventPublisher.publish(new HostsRemovedEvent( event == null ? Collections.emptyList() : event.getHosts())); } - + /** - * Publishes an autopsy event from the sleuthkit PersonCreationEvent + * Publishes an autopsy event from the sleuthkit PersonAddedEvent * indicating that persons have been created. - * + * * @param event The sleuthkit event for the creation of persons. */ - @Subscribe - public void publishPersonsAddedEvent(PersonsCreationEvent event) { + @Subscribe + public void publishPersonsAddedEvent(TskEvent.PersonsAddedTskEvent event) { eventPublisher.publish(new PersonsAddedEvent( event == null ? 
Collections.emptyList() : event.getPersons())); } - + /** - * Publishes an autopsy event from the sleuthkit PersonUpdateEvent + * Publishes an autopsy event from the sleuthkit PersonChangedEvent * indicating that persons have been updated. - * + * * @param event The sleuthkit event for the updating of persons. - */ - @Subscribe - public void publishPersonsChangedEvent(PersonsUpdateEvent event) { + */ + @Subscribe + public void publishPersonsChangedEvent(TskEvent.PersonsChangedTskEvent event) { eventPublisher.publish(new PersonsChangedEvent( event == null ? Collections.emptyList() : event.getPersons())); } - + /** - * Publishes an autopsy event from the sleuthkit PersonDeletedEvent + * Publishes an autopsy event from the sleuthkit PersonDeletedEvent * indicating that persons have been deleted. - * + * * @param event The sleuthkit event for the deleting of persons. - */ - @Subscribe - public void publishPersonsDeletedEvent(PersonsDeletionEvent event) { - eventPublisher.publish(new PersonsRemovedEvent( + */ + @Subscribe + public void publishPersonsDeletedEvent(TskEvent.PersonsDeletedTskEvent event) { + eventPublisher.publish(new PersonsDeletedEvent( event == null ? 
Collections.emptyList() : event.getPersons())); } } @@ -869,12 +871,12 @@ public class Case { eventPublisher.publishLocally(new AutopsyEvent(Events.CURRENT_CASE.toString(), closedCase, null)); logger.log(Level.INFO, "Closing current case {0} ({1}) in {2}", new Object[]{closedCase.getDisplayName(), closedCase.getName(), closedCase.getCaseDirectory()}); //NON-NLS closedCase.doCloseCaseAction(); - currentCase = null; logger.log(Level.INFO, "Closed current case {0} ({1}) in {2}", new Object[]{closedCase.getDisplayName(), closedCase.getName(), closedCase.getCaseDirectory()}); //NON-NLS } catch (CaseActionException ex) { logger.log(Level.SEVERE, String.format("Error closing current case %s (%s) in %s", closedCase.getDisplayName(), closedCase.getName(), closedCase.getCaseDirectory()), ex); //NON-NLS throw ex; } finally { + currentCase = null; if (RuntimeProperties.runningWithGUI()) { updateGUIForCaseClosed(); } @@ -1213,9 +1215,7 @@ public class Case { /** * Update the GUI to to reflect the current case. */ - private static void updateGUIForCaseOpened(Case newCurrentCase) { - if (RuntimeProperties.runningWithGUI()) { - SwingUtilities.invokeLater(() -> { + private static void updateGUIForCaseOpened(Case newCurrentCase) { /* * If the case database was upgraded for a new schema and a * backup database was created, notify the user. 
@@ -1241,17 +1241,31 @@ public class Case { String path = entry.getValue(); boolean fileExists = (new File(path).isFile() || DriveUtils.driveExists(path)); if (!fileExists) { - int response = JOptionPane.showConfirmDialog( - mainFrame, - NbBundle.getMessage(Case.class, "Case.checkImgExist.confDlg.doesntExist.msg", path), - NbBundle.getMessage(Case.class, "Case.checkImgExist.confDlg.doesntExist.title"), - JOptionPane.YES_NO_OPTION); - if (response == JOptionPane.YES_OPTION) { - MissingImageDialog.makeDialog(obj_id, caseDb); - } else { - logger.log(Level.SEVERE, "User proceeding with missing image files"); //NON-NLS + try { + // Using invokeAndWait means that the dialog will + // open on the EDT but this thread will wait for an + // answer. Using invokeLater would cause this loop to + // end before all of the dialogs appeared. + SwingUtilities.invokeAndWait(new Runnable() { + @Override + public void run() { + int response = JOptionPane.showConfirmDialog( + mainFrame, + NbBundle.getMessage(Case.class, "Case.checkImgExist.confDlg.doesntExist.msg", path), + NbBundle.getMessage(Case.class, "Case.checkImgExist.confDlg.doesntExist.title"), + JOptionPane.YES_NO_OPTION); + if (response == JOptionPane.YES_OPTION) { + MissingImageDialog.makeDialog(obj_id, caseDb); + } else { + logger.log(Level.SEVERE, "User proceeding with missing image files"); //NON-NLS - } + } + } + + }); + } catch (InterruptedException | InvocationTargetException ex) { + logger.log(Level.SEVERE, "Failed to show missing image confirmation dialog", ex); //NON-NLS + } } } @@ -1269,14 +1283,16 @@ public class Case { CallableSystemAction.get(CommonAttributeSearchAction.class).setEnabled(true); CallableSystemAction.get(OpenOutputFolderAction.class).setEnabled(false); CallableSystemAction.get(OpenDiscoveryAction.class).setEnabled(true); - - /* - * Add the case to the recent cases tracker that supplies a list - * of recent cases to the recent cases menu item and the - * open/create case dialog. 
- */ - RecentCases.getInstance().addRecentCase(newCurrentCase.getDisplayName(), newCurrentCase.getMetadata().getFilePath().toString()); - + + /* + * Add the case to the recent cases tracker that supplies a list + * of recent cases to the recent cases menu item and the + * open/create case dialog. + */ + RecentCases.getInstance().addRecentCase(newCurrentCase.getDisplayName(), newCurrentCase.getMetadata().getFilePath().toString()); + final boolean hasData = newCurrentCase.hasData(); + + SwingUtilities.invokeLater(() -> { /* * Open the top components (windows within the main application * window). @@ -1285,8 +1301,11 @@ public class Case { * opened via the DirectoryTreeTopComponent 'propertyChange()' * method on a DATA_SOURCE_ADDED event. */ - if (newCurrentCase.hasData()) { + if (hasData) { CoreComponentControl.openCoreWindows(); + } else { + //ensure that the DirectoryTreeTopComponent is open so that it's listener can open the core windows including making it visible. + DirectoryTreeTopComponent.findInstance(); } /* @@ -1296,7 +1315,6 @@ public class Case { */ mainFrame.setTitle(newCurrentCase.getDisplayName() + " - " + getNameForTitle()); }); - } } /* @@ -1471,6 +1489,13 @@ public class Case { return hostPath.toString(); } + /** + * @return A subdirectory of java.io.tmpdir. + */ + private Path getBaseSystemTempPath() { + return Paths.get(System.getProperty("java.io.tmpdir"), APP_NAME, getName()); + } + /** * Gets the full path to the temp directory for this case, creating it if it * does not exist. @@ -1478,16 +1503,45 @@ public class Case { * @return The temp subdirectory path. 
*/ public String getTempDirectory() { - // get temp folder scoped to the combination of case name and timestamp - // provided by getName() - Path path = Paths.get(UserPreferences.getAppTempDirectory(), CASE_TEMP_DIR, getName()); - File f = path.toFile(); - // verify that the folder exists - if (!f.exists()) { - f.mkdirs(); + // NOTE: UserPreferences may also be affected by changes in this method. + // See JIRA-7505 for more information. + Path basePath = null; + // get base temp path for the case based on user preference + switch (UserMachinePreferences.getTempDirChoice()) { + case CUSTOM: + String customDirectory = UserMachinePreferences.getCustomTempDirectory(); + basePath = (StringUtils.isBlank(customDirectory)) + ? null + : Paths.get(customDirectory, APP_NAME, getName()); + break; + case CASE: + basePath = Paths.get(getCaseDirectory()); + break; + case SYSTEM: + default: + // at this level, if the case directory is specified for a temp + // directory, return the system temp directory instead. + basePath = getBaseSystemTempPath(); + break; } - return path.toAbsolutePath().toString(); + basePath = basePath == null ? getBaseSystemTempPath() : basePath; + + // get sub directories based on multi user vs. single user + Path caseRelPath = (CaseType.MULTI_USER_CASE.equals(getCaseType())) + ? 
Paths.get(NetworkUtils.getLocalHostName(), TEMP_FOLDER) + : Paths.get(TEMP_FOLDER); + + File caseTempDir = basePath + .resolve(caseRelPath) + .toFile(); + + // ensure directory exists + if (!caseTempDir.exists()) { + caseTempDir.mkdirs(); + } + + return caseTempDir.getAbsolutePath(); } /** @@ -1789,7 +1843,7 @@ public class Case { public void notifyBlackBoardArtifactTagDeleted(BlackboardArtifactTag deletedTag) { eventPublisher.publish(new BlackBoardArtifactTagDeletedEvent(deletedTag)); } - + public void notifyOsAccountAdded(OsAccount account) { eventPublisher.publish(new OsAccountAddedEvent(account)); } @@ -1797,9 +1851,14 @@ public class Case { public void notifyOsAccountChanged(OsAccount account) { eventPublisher.publish(new OsAccountChangedEvent(account)); } - + + public void notifyOsAccountRemoved(Long osAccountObjectId) { + eventPublisher.publish(new OsAccountDeletedEvent(osAccountObjectId)); + } + /** * Notify via an autopsy event that a host has been added. + * * @param host The host that has been added. */ public void notifyHostAdded(Host host) { @@ -1808,22 +1867,25 @@ public class Case { /** * Notify via an autopsy event that a host has been changed. + * * @param newValue The host that has been updated. */ public void notifyHostChanged(Host newValue) { eventPublisher.publish(new HostsChangedEvent(Collections.singletonList(newValue))); } - + /** * Notify via an autopsy event that a host has been deleted. + * * @param host The host that has been deleted. */ public void notifyHostDeleted(Host host) { eventPublisher.publish(new HostsRemovedEvent(Collections.singletonList(host))); } - + /** * Notify via an autopsy event that a person has been added. + * * @param person The person that has been added. */ public void notifyPersonAdded(Person person) { @@ -1832,20 +1894,22 @@ public class Case { /** * Notify via an autopsy event that a person has been changed. + * * @param newValue The person that has been updated. 
*/ public void notifyPersonChanged(Person newValue) { eventPublisher.publish(new PersonsChangedEvent(Collections.singletonList(newValue))); } - + /** * Notify via an autopsy event that a person has been deleted. + * * @param person The person that has been deleted. */ public void notifyPersonDeleted(Person person) { - eventPublisher.publish(new PersonsRemovedEvent(Collections.singletonList(person))); + eventPublisher.publish(new PersonsDeletedEvent(Collections.singletonList(person))); } - + /** * Adds a report to the case. * @@ -1924,7 +1988,7 @@ public class Case { * * @return A CaseMetaData object. */ - CaseMetadata getMetadata() { + public CaseMetadata getMetadata() { return metadata; } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CaseMetadata.java b/Core/src/org/sleuthkit/autopsy/casemodule/CaseMetadata.java index 5f7ffe9ea7..96f9899dae 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/CaseMetadata.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/CaseMetadata.java @@ -218,7 +218,7 @@ public final class CaseMetadata { * * @return The path to the metadata file */ - Path getFilePath() { + public Path getFilePath() { return metadataFilePath; } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java b/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java index 9967f1fb68..8f749199f7 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/CasePreferences.java @@ -35,22 +35,22 @@ import org.sleuthkit.autopsy.directorytree.DirectoryTreeTopComponent; * Read and update case preference file values. 
*/ public final class CasePreferences { - + private static final String SETTINGS_FILE = "CasePreferences.properties"; //NON-NLS private static final String KEY_GROUP_BY_DATA_SOURCE = "groupByDataSource"; //NON-NLS private static final String VALUE_TRUE = "true"; //NON-NLS private static final String VALUE_FALSE = "false"; //NON-NLS - + private static final Logger logger = Logger.getLogger(CasePreferences.class.getName()); - + private static Boolean groupItemsInTreeByDataSource = false; - + /** * Prevent instantiation. */ private CasePreferences() { } - + static { Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), (PropertyChangeEvent evt) -> { if (evt.getNewValue() != null) { @@ -66,25 +66,27 @@ public final class CasePreferences { logger.log(Level.SEVERE, "No current case open.", ex); } } - + /** * Get the 'groupItemsInTreeByDataSource' value. This can be true, false, or * null. - * + * * @return The value. */ public static Boolean getGroupItemsInTreeByDataSource() { return groupItemsInTreeByDataSource; } - + /** * Set the 'groupItemsInTreeByDataSource' value to true or false. - * + * * @param value The value to use for the value change. */ public static void setGroupItemsInTreeByDataSource(boolean value) { groupItemsInTreeByDataSource = value; - DirectoryTreeTopComponent.getDefault().refreshContentTreeSafe(); + if (Case.isCaseOpen()) { + DirectoryTreeTopComponent.getDefault().refreshContentTreeSafe(); + } } /** @@ -120,7 +122,7 @@ public final class CasePreferences { } } } - + /** * Reset all values to their default states. 
*/ diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java b/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java index 07a88ee07f..9b174f28f7 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java @@ -284,10 +284,10 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour ingestStream = IngestManager.getInstance().openIngestStream(image, settings); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error starting ingest modules", ex); - final List errors = new ArrayList<>(); - errors.add(ex.getMessage()); - callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>()); - return; + // There was an error with ingest, but the data source has already been added + // so proceed with the defaultIngestStream. Code in openIngestStream + // should have caused a dialog to popup with the errors. 
+ ingestStream = new DefaultIngestStream(); } doAddImageProcess(deviceId, imagePath, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, progress, callBack); diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/NewCaseWizardPanel2.java b/Core/src/org/sleuthkit/autopsy/casemodule/NewCaseWizardPanel2.java index b5119dc389..8550d5d81e 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/NewCaseWizardPanel2.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/NewCaseWizardPanel2.java @@ -55,10 +55,8 @@ class NewCaseWizardPanel2 implements WizardDescriptor.ValidatingPanel startupWindows = Lookup.getDefault().lookupAll(StartupWindowInterface.class); int windowsCount = startupWindows.size(); - if (windowsCount == 1) { - startupWindowToUse = startupWindows.iterator().next(); - logger.log(Level.INFO, "Will use the default startup window: " + startupWindowToUse.toString()); //NON-NLS - } else if (windowsCount == 2) { - //pick the non default one - Iterator it = startupWindows.iterator(); - while (it.hasNext()) { - StartupWindowInterface window = it.next(); - if (!org.sleuthkit.autopsy.casemodule.StartupWindow.class.isInstance(window)) { - startupWindowToUse = window; - logger.log(Level.INFO, "Will use the custom startup window: " + startupWindowToUse.toString()); //NON-NLS - break; + switch (windowsCount) { + case 1: + startupWindowToUse = startupWindows.iterator().next(); + logger.log(Level.INFO, "Will use the default startup window: {0}", startupWindowToUse.toString()); //NON-NLS + break; + case 2: { + //pick the non default one + Iterator it = startupWindows.iterator(); + while (it.hasNext()) { + StartupWindowInterface window = it.next(); + if (!org.sleuthkit.autopsy.casemodule.StartupWindow.class.isInstance(window)) { + startupWindowToUse = window; + logger.log(Level.INFO, "Will use the custom startup window: {0}", startupWindowToUse.toString()); //NON-NLS + break; + } } + break; } - } else { - // select first non-Autopsy start up window - Iterator it = 
startupWindows.iterator(); - while (it.hasNext()) { - StartupWindowInterface window = it.next(); - if (!window.getClass().getCanonicalName().startsWith("org.sleuthkit.autopsy")) { - startupWindowToUse = window; - logger.log(Level.INFO, "Will use the custom startup window: " + startupWindowToUse.toString()); //NON-NLS - break; + default: { + // select first non-Autopsy start up window + Iterator it = startupWindows.iterator(); + while (it.hasNext()) { + StartupWindowInterface window = it.next(); + if (!window.getClass().getCanonicalName().startsWith("org.sleuthkit.autopsy")) { + startupWindowToUse = window; + logger.log(Level.INFO, "Will use the custom startup window: {0}", startupWindowToUse.toString()); //NON-NLS + break; + } } + break; } } @@ -121,6 +139,45 @@ public class StartupWindowProvider implements StartupWindowInterface { startupWindowToUse = new org.sleuthkit.autopsy.casemodule.StartupWindow(); } } + File openPreviousCaseFile = new File(ResetWindowsAction.getCaseToReopenFilePath()); + + if (openPreviousCaseFile.exists()) { + //do actual opening on another thread + new Thread(() -> { + String caseFilePath = ""; + String unableToOpenMessage = null; + try { + //avoid readFileToString having ambiguous arguments + Charset encoding = null; + caseFilePath = FileUtils.readFileToString(openPreviousCaseFile, encoding); + if (new File(caseFilePath).exists()) { + FileUtils.forceDelete(openPreviousCaseFile); + //close the startup window as we attempt to open the case + close(); + Case.openAsCurrentCase(caseFilePath); + + } else { + unableToOpenMessage = Bundle.StartupWindowProvider_openCase_noFile(caseFilePath); + logger.log(Level.WARNING, unableToOpenMessage); + } + } catch (IOException ex) { + unableToOpenMessage = Bundle.StartupWindowProvider_openCase_deleteOpenFailure(ResetWindowsAction.getCaseToReopenFilePath()); + logger.log(Level.WARNING, unableToOpenMessage, ex); + } catch (CaseActionException ex) { + unableToOpenMessage = 
Bundle.StartupWindowProvider_openCase_cantOpen(caseFilePath); + logger.log(Level.WARNING, unableToOpenMessage, ex); + } + + if (RuntimeProperties.runningWithGUI() && !StringUtils.isBlank(unableToOpenMessage)) { + final String message = unableToOpenMessage; + SwingUtilities.invokeLater(() -> { + MessageNotifyUtil.Message.warn(message); + //the case was not opened restore the startup window + open(); + }); + } + }).start(); + } } private void checkSolr() { @@ -147,9 +204,9 @@ public class StartupWindowProvider implements StartupWindowInterface { * @return True if running from command line, false otherwise */ private boolean isRunningFromCommandLine() { - + CommandLineOptionProcessor processor = Lookup.getDefault().lookup(CommandLineOptionProcessor.class); - if(processor != null) { + if (processor != null) { return processor.isRunFromCommandLine(); } return false; @@ -157,12 +214,12 @@ public class StartupWindowProvider implements StartupWindowInterface { /** * Get the default argument from the CommandLineOptionProcessor. - * - * @return If set, the default argument otherwise null. + * + * @return If set, the default argument otherwise null. 
*/ - private String getDefaultArgument() { + private String getDefaultArgument() { CommandLineOptionProcessor processor = Lookup.getDefault().lookup(CommandLineOptionProcessor.class); - if(processor != null) { + if (processor != null) { return processor.getDefaultArgument(); } return null; diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/HostsEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/HostsEvent.java index 1ba225f4a2..7c9a31f01e 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/events/HostsEvent.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/HostsEvent.java @@ -33,6 +33,8 @@ import org.sleuthkit.datamodel.TskCoreException; */ public class HostsEvent extends TskDataModelChangeEvent { + private static final long serialVersionUID = 1L; + /** * Retrieves a list of ids from a list of hosts. * @@ -42,7 +44,7 @@ public class HostsEvent extends TskDataModelChangeEvent { private static List getIds(List hosts) { return getSafeList(hosts).stream() .filter(h -> h != null) - .map(h -> h.getId()).collect(Collectors.toList()); + .map(h -> h.getHostId()).collect(Collectors.toList()); } /** @@ -76,7 +78,7 @@ public class HostsEvent extends TskDataModelChangeEvent { continue; } - Optional thisHostOpt = hostManager.getHost(id); + Optional thisHostOpt = hostManager.getHostById(id); thisHostOpt.ifPresent((h) -> toRet.add(h)); } } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/OsAccountDeletedEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/OsAccountDeletedEvent.java new file mode 100644 index 0000000000..adc726fca8 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/OsAccountDeletedEvent.java @@ -0,0 +1,37 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.casemodule.events; + +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.events.AutopsyEvent; + +/** + * Event published when an OsAccount is deleted. + * + * oldValue will contain the objectId of the account that was removed. newValue + * will be null. + */ +public final class OsAccountDeletedEvent extends AutopsyEvent { + + private static final long serialVersionUID = 1L; + + public OsAccountDeletedEvent(Long osAccountObjectId) { + super(Case.Events.OS_ACCOUNT_REMOVED.toString(), osAccountObjectId, null); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/OsAccountEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/OsAccountEvent.java index 22b0622ba8..d34da7822d 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/events/OsAccountEvent.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/OsAccountEvent.java @@ -56,7 +56,7 @@ class OsAccountEvent extends TskDataModelChangeEvent { @Override protected List getDataModelObjects(SleuthkitCase caseDb, List ids) throws TskCoreException { Long id = ids.get(0); - OsAccount account = caseDb.getOsAccountManager().getOsAccount(id); + OsAccount account = caseDb.getOsAccountManager().getOsAccountByObjectId(id); List accounts = new ArrayList<>(); accounts.add(account); return accounts; diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsRemovedEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsDeletedEvent.java similarity index 90% rename from 
Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsRemovedEvent.java rename to Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsDeletedEvent.java index 522c841461..a63fef32cb 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsRemovedEvent.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsDeletedEvent.java @@ -25,7 +25,7 @@ import org.sleuthkit.datamodel.Person; /** * Event fired when persons are removed. */ -public class PersonsRemovedEvent extends PersonsEvent { +public class PersonsDeletedEvent extends PersonsEvent { private static final long serialVersionUID = 1L; @@ -33,7 +33,7 @@ public class PersonsRemovedEvent extends PersonsEvent { * Main constructor. * @param dataModelObjects The list of persons that have been deleted. */ - public PersonsRemovedEvent(List dataModelObjects) { + public PersonsDeletedEvent(List dataModelObjects) { super(Case.Events.PERSONS_DELETED.name(), dataModelObjects); } } diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsEvent.java b/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsEvent.java index ef74585b7f..e3f584d58a 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsEvent.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/events/PersonsEvent.java @@ -42,7 +42,7 @@ public class PersonsEvent extends TskDataModelChangeEvent { private static List getIds(List persons) { return getSafeList(persons).stream() .filter(h -> h != null) - .map(h -> h.getId()).collect(Collectors.toList()); + .map(h -> h.getPersonId()).collect(Collectors.toList()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.form b/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.form index ada4c7f484..9b8c2c8f72 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.form +++ 
b/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.form @@ -23,25 +23,4 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.java index 154e692663..04785cc83d 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/multiusercasesbrowser/MultiUserCasesBrowserPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017-2019 Basis Technology Corp. + * Copyright 2017-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.casemodule.multiusercasesbrowser; +import java.awt.BorderLayout; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -68,8 +69,7 @@ public final class MultiUserCasesBrowserPanel extends javax.swing.JPanel impleme outlineView = new org.openide.explorer.view.OutlineView(); outline = this.outlineView.getOutline(); configureOutlineView(); - caseTableScrollPane.add(outlineView); - caseTableScrollPane.setViewportView(outlineView); + add(outlineView, BorderLayout.CENTER); this.setVisible(true); } @@ -146,20 +146,11 @@ public final class MultiUserCasesBrowserPanel extends javax.swing.JPanel impleme // //GEN-BEGIN:initComponents private void initComponents() { - caseTableScrollPane = new javax.swing.JScrollPane(); - setMinimumSize(new java.awt.Dimension(0, 5)); setPreferredSize(new java.awt.Dimension(5, 5)); setLayout(new java.awt.BorderLayout()); - - caseTableScrollPane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); - caseTableScrollPane.setMinimumSize(new java.awt.Dimension(0, 5)); - caseTableScrollPane.setOpaque(false); - 
caseTableScrollPane.setPreferredSize(new java.awt.Dimension(500, 500)); - add(caseTableScrollPane, java.awt.BorderLayout.CENTER); }// //GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables - private javax.swing.JScrollPane caseTableScrollPane; // End of variables declaration//GEN-END:variables } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java index 997ecbab82..cd901c0c1d 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java @@ -51,7 +51,7 @@ import org.sleuthkit.datamodel.TskException; * View correlation results from other cases */ @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives -@ServiceProvider(service = DataContentViewer.class, position = 9) +@ServiceProvider(service = DataContentViewer.class, position = 10) @Messages({"DataContentViewerOtherCases.title=Other Occurrences", "DataContentViewerOtherCases.toolTip=Displays instances of the selected file/artifact from other occurrences."}) public final class DataContentViewerOtherCases extends JPanel implements DataContentViewer { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/OtherOccurrencesPanel.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/OtherOccurrencesPanel.java index 4976b72278..75ffb5c9ad 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/OtherOccurrencesPanel.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/OtherOccurrencesPanel.java @@ -130,7 +130,7 @@ public final class OtherOccurrencesPanel extends javax.swing.JPanel { exportToCSVMenuItem.addActionListener(actList); showCaseDetailsMenuItem.addActionListener(actList); 
showCommonalityMenuItem.addActionListener(actList); - + filesTable.setComponentPopupMenu(rightClickPopupMenu); // Configure column sorting. TableRowSorter sorter = new TableRowSorter<>(filesTable.getModel()); filesTable.setRowSorter(sorter); @@ -380,7 +380,6 @@ public final class OtherOccurrencesPanel extends javax.swing.JPanel { int totalCount = 0; Set dataSources = new HashSet<>(); if (CentralRepository.isEnabled()) { - try { List instances; instances = CentralRepository.getInstance().getArtifactInstancesByTypeValue(aType, value); @@ -395,12 +394,12 @@ public final class OtherOccurrencesPanel extends javax.swing.JPanel { // - the data source device ID is different // - the file path is different if (artifactInstance.getCorrelationCase().getCaseUUID().equals(caseUUID) - || (!StringUtils.isBlank(dataSourceName) && artifactInstance.getCorrelationDataSource().getName().equals(dataSourceName)) - || (!StringUtils.isBlank(deviceId) && artifactInstance.getCorrelationDataSource().getDeviceID().equals(deviceId)) - || (file != null && artifactInstance.getFilePath().equalsIgnoreCase(file.getParentPath() + file.getName()))) { - correlationAttributes.add(artifactInstance); + && (!StringUtils.isBlank(dataSourceName) && artifactInstance.getCorrelationDataSource().getName().equals(dataSourceName)) + && (!StringUtils.isBlank(deviceId) && artifactInstance.getCorrelationDataSource().getDeviceID().equals(deviceId)) + && (file != null && artifactInstance.getFilePath().equalsIgnoreCase(file.getParentPath() + file.getName()))) { continue; } + correlationAttributes.add(artifactInstance); OtherOccurrenceNodeInstanceData newNode = new OtherOccurrenceNodeInstanceData(artifactInstance, aType, value); UniquePathKey uniquePathKey = new UniquePathKey(newNode); nodeDataMap.put(uniquePathKey, newNode); @@ -510,9 +509,7 @@ public final class OtherOccurrencesPanel extends javax.swing.JPanel { * artifact. 
If the central repo is not enabled, this will only return files * from the current case with matching MD5 hashes. * - * @param corAttr CorrelationAttribute to query for - * @param dataSourceName Data source to filter results - * @param deviceId Device Id to filter results + * @param corAttr CorrelationAttribute to query for * * @return A collection of correlated artifact instances */ @@ -535,9 +532,9 @@ public final class OtherOccurrencesPanel extends javax.swing.JPanel { // - the data source device ID is different // - the file path is different if (artifactInstance.getCorrelationCase().getCaseUUID().equals(caseUUID) - || (!StringUtils.isBlank(dataSourceName) && artifactInstance.getCorrelationDataSource().getName().equals(dataSourceName)) - || (!StringUtils.isBlank(deviceId) && artifactInstance.getCorrelationDataSource().getDeviceID().equals(deviceId)) - || (file != null && artifactInstance.getFilePath().equalsIgnoreCase(file.getParentPath() + file.getName()))) { + && (!StringUtils.isBlank(dataSourceName) && artifactInstance.getCorrelationDataSource().getName().equals(dataSourceName)) + && (!StringUtils.isBlank(deviceId) && artifactInstance.getCorrelationDataSource().getDeviceID().equals(deviceId)) + && (file != null && artifactInstance.getFilePath().equalsIgnoreCase(file.getParentPath() + file.getName()))) { continue; } OtherOccurrenceNodeInstanceData newNode = new OtherOccurrenceNodeInstanceData(artifactInstance, corAttr.getCorrelationType(), corAttr.getCorrelationValue()); @@ -618,6 +615,7 @@ public final class OtherOccurrencesPanel extends javax.swing.JPanel { * * @param corAttr The CorrelationAttribute containing the MD5 to search for * @param openCase The current case + * @param file The current file. 
* * @return List of matching AbstractFile objects * diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED index a7b597640e..f43b438b2c 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED @@ -12,7 +12,7 @@ CentralRepoDbChoice.PostgreSQL.Text=Custom PostgreSQL CentralRepoDbChoice.PostgreSQL_Multiuser.Text=PostgreSQL using multi-user settings CentralRepoDbChoice.Sqlite.Text=SQLite CentralRepoDbManager.connectionErrorMsg.text=Failed to connect to central repository database. -CentralRepositoryService.progressMsg.updatingSchema=Updating schema... +CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates... CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database.... CentralRepositoryService.serviceName=Central Repository Service CorrelationAttributeInstance.invalidName.message=Invalid database table name. Name must start with a lowercase letter and can only contain lowercase letters, numbers, and '_'. 
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java index 842c8e3f04..289dfa476f 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.centralrepository.datamodel; import java.sql.SQLException; import java.util.Collection; import java.util.List; +import java.util.Optional; import java.util.Set; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.casemodule.Case; @@ -860,10 +861,10 @@ public interface CentralRepository { * Get account type by type name. * * @param accountTypeName account type name to look for - * @return CR account type + * @return CR account type (if found) * @throws CentralRepoException */ - CentralRepoAccountType getAccountTypeByName(String accountTypeName) throws CentralRepoException; + Optional getAccountTypeByName(String accountTypeName) throws CentralRepoException; /** * Gets all account types. diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java old mode 100644 new mode 100755 index 4ed13dc82a..83dc61d06d --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java @@ -47,7 +47,7 @@ public class CentralRepositoryService implements AutopsyService { } @NbBundle.Messages({ - "CentralRepositoryService.progressMsg.updatingSchema=Updating schema..." + "CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates..." 
}) @Override public void openCaseResources(CaseContext context) throws AutopsyServiceException { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java index 4867b4eb0a..2d0315ef7b 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java @@ -22,6 +22,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.logging.Level; import org.openide.util.NbBundle.Messages; @@ -308,8 +309,12 @@ public class CorrelationAttributeUtil { if (Account.Type.DEVICE.getTypeName().equalsIgnoreCase(accountTypeStr) == false && predefinedAccountType != null) { // Get the corresponding CentralRepoAccountType from the database. 
- CentralRepoAccountType crAccountType = CentralRepository.getInstance().getAccountTypeByName(accountTypeStr); - + Optional optCrAccountType = CentralRepository.getInstance().getAccountTypeByName(accountTypeStr); + if (!optCrAccountType.isPresent()) { + return; + } + CentralRepoAccountType crAccountType = optCrAccountType.get(); + int corrTypeId = crAccountType.getCorrelationTypeId(); CorrelationAttributeInstance.Type corrType = CentralRepository.getInstance().getCorrelationTypeById(corrTypeId); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java index afbb2fe4b5..59b0ebe627 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java @@ -26,8 +26,10 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; import org.apache.commons.lang3.StringUtils; import org.sleuthkit.datamodel.Account; +import org.sleuthkit.datamodel.TskCoreException; /** * This class represents an association between a Persona and an Account. @@ -206,10 +208,15 @@ public class PersonaAccount { ); // create account - CentralRepoAccount.CentralRepoAccountType crAccountType = getCRInstance().getAccountTypeByName(rs.getString("type_name")); + String accountTypeName = rs.getString("type_name"); + Optional optCrAccountType = getCRInstance().getAccountTypeByName(accountTypeName); + if (! 
optCrAccountType.isPresent()) { + // The CR account can not be null, so throw an exception + throw new CentralRepoException("Account type with name '" + accountTypeName + "' not found in Central Repository"); + } CentralRepoAccount account = new CentralRepoAccount( rs.getInt("account_id"), - crAccountType, + optCrAccountType.get(), rs.getString("account_unique_identifier")); // create persona account @@ -389,10 +396,15 @@ public class PersonaAccount { while (rs.next()) { // create account - CentralRepoAccount.CentralRepoAccountType crAccountType = getCRInstance().getAccountTypeByName(rs.getString("type_name")); + String accountTypeName = rs.getString("type_name"); + Optional optCrAccountType = getCRInstance().getAccountTypeByName(accountTypeName); + if (! optCrAccountType.isPresent()) { + // The CR account can not be null, so throw an exception + throw new CentralRepoException("Account type with name '" + accountTypeName + "' not found in Central Repository"); + } CentralRepoAccount account = new CentralRepoAccount( rs.getInt("account_id"), - crAccountType, + optCrAccountType.get(), rs.getString("account_unique_identifier")); accountsList.add(account); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java index b48797e3fc..1b4ce08c18 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java @@ -37,6 +37,7 @@ import java.time.LocalDate; import java.util.Arrays; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -78,7 +79,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { private static final int CASE_CACHE_TIMEOUT = 5; private static final int DATA_SOURCE_CACHE_TIMEOUT = 5; 
private static final int ACCOUNTS_CACHE_TIMEOUT = 5; - private static final Cache accountTypesCache = CacheBuilder.newBuilder().build(); + private static final Cache> accountTypesCache = CacheBuilder.newBuilder().build(); private static final Cache, CentralRepoAccount> accountsCache = CacheBuilder.newBuilder() .expireAfterWrite(ACCOUNTS_CACHE_TIMEOUT, TimeUnit.MINUTES). build(); @@ -1115,7 +1116,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { } @Override - public CentralRepoAccountType getAccountTypeByName(String accountTypeName) throws CentralRepoException { + public Optional getAccountTypeByName(String accountTypeName) throws CentralRepoException { try { return accountTypesCache.get(accountTypeName, () -> getCRAccountTypeFromDb(accountTypeName)); } catch (CacheLoader.InvalidCacheLoadException | ExecutionException ex) { @@ -1155,7 +1156,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { * * @throws CentralRepoException */ - private CentralRepoAccountType getCRAccountTypeFromDb(String accountTypeName) throws CentralRepoException { + private Optional getCRAccountTypeFromDb(String accountTypeName) throws CentralRepoException { String sql = "SELECT * FROM account_types WHERE type_name = ?"; try (Connection conn = connect(); @@ -1166,10 +1167,11 @@ abstract class RdbmsCentralRepo implements CentralRepository { if (resultSet.next()) { Account.Type acctType = new Account.Type(accountTypeName, resultSet.getString("display_name")); CentralRepoAccountType crAccountType = new CentralRepoAccountType(resultSet.getInt("id"), acctType, resultSet.getInt("correlation_type_id")); - accountTypesCache.put(accountTypeName, crAccountType); - return crAccountType; + accountTypesCache.put(accountTypeName, Optional.of(crAccountType)); + return Optional.of(crAccountType); } else { - throw new CentralRepoException("Failed to find entry for account type = " + accountTypeName); + accountTypesCache.put(accountTypeName, Optional.empty()); + return 
Optional.empty(); } } } catch (SQLException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepoSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepoSettings.java index a938dd166a..2e52694416 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepoSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepoSettings.java @@ -22,6 +22,8 @@ import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.InvalidPathException; +import java.nio.file.Path; +import java.nio.file.Paths; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; @@ -41,7 +43,15 @@ public final class SqliteCentralRepoSettings implements CentralRepoDbConnectivit public final static String DEFAULT_DBNAME = "central_repository.db"; // NON-NLS private final static Logger LOGGER = Logger.getLogger(SqliteCentralRepoSettings.class.getName()); + private final Path userConfigDir = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath()); private final static String DEFAULT_DBDIRECTORY = PlatformUtil.getUserDirectory() + File.separator + "central_repository"; // NON-NLS + + //property names + private static final String PROFILE_NAME = "CentralRepository"; + private static final String DATABASE_NAME = "db.sqlite.dbName"; //NON-NLS + private static final String DATABASE_PATH = "db.sqlite.dbDirectory"; //NON-NLS + private static final String BULK_THRESHOLD = "db.sqlite.bulkThreshold"; //NON-NLS + private final static String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS private final static String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS private final static String VALIDATION_QUERY = "SELECT count(*) from sqlite_master"; // NON-NLS @@ -56,18 +66,18 @@ public final class SqliteCentralRepoSettings implements CentralRepoDbConnectivit } public void loadSettings() { - dbName = 
ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.dbName"); // NON-NLS + dbName = ModuleSettings.getConfigSetting(PROFILE_NAME, DATABASE_NAME); // NON-NLS if (dbName == null || dbName.isEmpty()) { dbName = DEFAULT_DBNAME; } - dbDirectory = ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.dbDirectory"); // NON-NLS + dbDirectory = readDbPath(); // NON-NLS if (dbDirectory == null || dbDirectory.isEmpty()) { dbDirectory = DEFAULT_DBDIRECTORY; } try { - String bulkThresholdString = ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.bulkThreshold"); // NON-NLS + String bulkThresholdString = ModuleSettings.getConfigSetting(PROFILE_NAME, BULK_THRESHOLD); // NON-NLS if (bulkThresholdString == null || bulkThresholdString.isEmpty()) { this.bulkThreshold = RdbmsCentralRepo.DEFAULT_BULK_THRESHHOLD; } else { @@ -96,9 +106,64 @@ public final class SqliteCentralRepoSettings implements CentralRepoDbConnectivit public void saveSettings() { createDbDirectory(); - ModuleSettings.setConfigSetting("CentralRepository", "db.sqlite.dbName", getDbName()); // NON-NLS - ModuleSettings.setConfigSetting("CentralRepository", "db.sqlite.dbDirectory", getDbDirectory()); // NON-NLS - ModuleSettings.setConfigSetting("CentralRepository", "db.sqlite.bulkThreshold", Integer.toString(getBulkThreshold())); // NON-NLS + ModuleSettings.setConfigSetting(PROFILE_NAME, DATABASE_NAME, getDbName()); // NON-NLS + saveDbPath(getDbDirectory()); // NON-NLS + ModuleSettings.setConfigSetting(PROFILE_NAME, BULK_THRESHOLD, Integer.toString(getBulkThreshold())); // NON-NLS + } + + /** + * Save CR database path. If the path is inside user directory (e.g. + * "C:\Users\USER_NAME\AppData\Roaming\autopsy"), trim that off and save it + * as a relative path (i.e it will not start with a “/” or drive letter). Otherwise, + * full path is saved. See JIRA-7348. + * + * @param fullPath Full path to the SQLite db file. 
+ */ + private void saveDbPath(String fullPath) { + Path relativePath = Paths.get(fullPath); + // check if the path is within user directory + if (Paths.get(fullPath).startsWith(userConfigDir)) { + // relativize the path + relativePath = userConfigDir.relativize(relativePath); + } + // Use properties to persist the logo to use. + ModuleSettings.setConfigSetting(PROFILE_NAME, DATABASE_PATH, relativePath.toString()); + } + + /** + * Read CD database path from preferences file. Reverses the path relativization performed + * in saveDbPath(). If the stored path starts with either “/” or drive letter, + * it is a full path, and is returned to the caller. Otherwise, append current user + * directory to the saved relative path. See JIRA-7348. + * + * @return Full path to the SQLite CR database file. + */ + private String readDbPath() { + + String curPath = ModuleSettings.getConfigSetting(PROFILE_NAME, DATABASE_PATH); + + + //if has been set, validate it's correct, if not set, return null + if (curPath != null && !curPath.isEmpty()) { + + // check if the path is an absolute path (starts with either drive letter or "/") + Path driveLetterOrNetwork = Paths.get(curPath).getRoot(); + if (driveLetterOrNetwork != null) { + // absolute path + return curPath; + } + + // Path is a relative path. 
Reverse path relativization performed in saveDbPath() + Path absolutePath = userConfigDir.resolve(curPath); + curPath = absolutePath.toString(); + if (new File(curPath).canRead() == false) { + //use default + LOGGER.log(Level.INFO, "Path to SQLite Central Repository database is not valid: {0}", curPath); //NON-NLS + curPath = null; + } + } + + return curPath; } /** @@ -252,9 +317,9 @@ public final class SqliteCentralRepoSettings implements CentralRepoDbConnectivit } boolean isChanged() { - String dbNameString = ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.dbName"); // NON-NLS - String dbDirectoryString = ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.dbDirectory"); // NON-NLS - String bulkThresholdString = ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.bulkThreshold"); // NON-NLS + String dbNameString = ModuleSettings.getConfigSetting(PROFILE_NAME, DATABASE_NAME); // NON-NLS + String dbDirectoryString = readDbPath(); // NON-NLS + String bulkThresholdString = ModuleSettings.getConfigSetting(PROFILE_NAME, BULK_THRESHOLD); // NON-NLS return !dbName.equals(dbNameString) || !dbDirectory.equals(dbDirectoryString) diff --git a/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java b/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java index 90c41e467f..f345ffa47e 100644 --- a/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java +++ b/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java @@ -184,6 +184,7 @@ final public class FiltersPanel extends JPanel { applyFiltersButton.addActionListener(e -> applyFilters()); refreshButton.addActionListener(e -> applyFilters()); } + /** * Validate that filters are in a consistent state and will result in some @@ -220,13 +221,18 @@ final public class FiltersPanel extends JPanel { } void initalizeFilters() { - Runnable runnable = new Runnable() { + + applyFiltersButton.setEnabled(false); + refreshButton.setEnabled(true); + 
needsRefreshLabel.setText("Loading filters..."); + needsRefreshLabel.setVisible(true); + + (new Thread(new Runnable(){ @Override public void run() { new FilterPanelRefresher(true, true).refresh(); } - }; - runnable.run(); + })).start(); } private void updateTimeZone() { @@ -1152,6 +1158,8 @@ final public class FiltersPanel extends JPanel { if (!isEnabled()) { setEnabled(true); } + + needsRefreshLabel.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.needsRefreshLabel.text")); // NOI18N validateFilters(); diff --git a/Core/src/org/sleuthkit/autopsy/communications/relationships/CorrelationCaseChildNodeFactory.java b/Core/src/org/sleuthkit/autopsy/communications/relationships/CorrelationCaseChildNodeFactory.java index 588e474303..1e2b350669 100755 --- a/Core/src/org/sleuthkit/autopsy/communications/relationships/CorrelationCaseChildNodeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/communications/relationships/CorrelationCaseChildNodeFactory.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.communications.relationships; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.logging.Level; import org.openide.nodes.AbstractNode; @@ -75,9 +76,9 @@ final class CorrelationCaseChildNodeFactory extends ChildFactory { try { - CorrelationAttributeInstance.Type correlationType = getCorrelationType(account.getAccountType()); - if (correlationType != null) { - List correlationInstances = dbInstance.getArtifactInstancesByTypeValue(correlationType, account.getTypeSpecificID()); + Optional optCorrelationType = getCorrelationType(account.getAccountType()); + if (optCorrelationType.isPresent()) { + List correlationInstances = dbInstance.getArtifactInstancesByTypeValue(optCorrelationType.get(), account.getTypeSpecificID()); correlationInstances.forEach((correlationInstance) -> { CorrelationCase correlationCase = correlationInstance.getCorrelationCase(); 
uniqueCaseMap.put(correlationCase.getCaseUUID(), correlationCase); @@ -103,20 +104,22 @@ final class CorrelationCaseChildNodeFactory extends ChildFactory getCorrelationType(Account.Type accountType) throws CentralRepoException { + String accountTypeStr = accountType.getTypeName(); if (Account.Type.DEVICE.getTypeName().equalsIgnoreCase(accountTypeStr) == false) { - CentralRepoAccount.CentralRepoAccountType crAccountType = CentralRepository.getInstance().getAccountTypeByName(accountTypeStr); - int corrTypeId = crAccountType.getCorrelationTypeId(); - return CentralRepository.getInstance().getCorrelationTypeById(corrTypeId); + Optional optCrAccountType = CentralRepository.getInstance().getAccountTypeByName(accountTypeStr); + if (optCrAccountType.isPresent()) { + int corrTypeId = optCrAccountType.get().getCorrelationTypeId(); + return Optional.of(CentralRepository.getInstance().getCorrelationTypeById(corrTypeId)); + } } - - return null; + return Optional.empty(); } /** diff --git a/Core/src/org/sleuthkit/autopsy/communications/relationships/MessageViewer.java b/Core/src/org/sleuthkit/autopsy/communications/relationships/MessageViewer.java index f9989c5f55..1c4c538ea9 100755 --- a/Core/src/org/sleuthkit/autopsy/communications/relationships/MessageViewer.java +++ b/Core/src/org/sleuthkit/autopsy/communications/relationships/MessageViewer.java @@ -49,6 +49,7 @@ import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Node.Property; import org.openide.nodes.Node.PropertySet; +import org.openide.util.Exceptions; import org.openide.util.Lookup; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.communications.ModifiableProxyLookup; @@ -146,14 +147,23 @@ final class MessageViewer extends JPanel implements RelationshipsViewer { @Override public void setSelectionInfo(SelectionInfo info) { - currentSelectionInfo = info; + if(currentSelectionInfo != null && currentSelectionInfo.equals(info)) { + try { + // Clear the currently 
selected thread so that clicks can + // be registered. + rootTablePane.getExplorerManager().setSelectedNodes(new Node[0]); + } catch (PropertyVetoException ex) { + logger.log(Level.WARNING, "Error clearing the selected node", ex); + } + } else { + currentSelectionInfo = info; + rootMessageFactory.refresh(info); + } currentPanel = rootTablePane; CardLayout layout = (CardLayout) this.getLayout(); - layout.show(this, "threads"); - - rootMessageFactory.refresh(info); + layout.show(this, "threads"); } @Override @@ -192,8 +202,8 @@ final class MessageViewer extends JPanel implements RelationshipsViewer { if (isDescendingFrom(newFocusOwner, rootTablePane)) { proxyLookup.setNewLookups(createLookup(rootTablePane.getExplorerManager(), getActionMap())); } else if (isDescendingFrom(newFocusOwner, this)) { - proxyLookup.setNewLookups(createLookup(currentPanel.getExplorerManager(), getActionMap())); - } + proxyLookup.setNewLookups(createLookup(threadMessagesPanel.getExplorerManager(), getActionMap())); + } } @Override diff --git a/Core/src/org/sleuthkit/autopsy/communications/relationships/MessagesPanel.java b/Core/src/org/sleuthkit/autopsy/communications/relationships/MessagesPanel.java index f409c288f7..1955cc333b 100755 --- a/Core/src/org/sleuthkit/autopsy/communications/relationships/MessagesPanel.java +++ b/Core/src/org/sleuthkit/autopsy/communications/relationships/MessagesPanel.java @@ -113,7 +113,7 @@ class MessagesPanel extends javax.swing.JPanel implements Lookup.Provider { outlineViewPanel.setTableColumnsWidth(5, 10, 10, 15, 50, 10); } - public MessagesPanel(ChildFactory nodeFactory) { + MessagesPanel(ChildFactory nodeFactory) { this(); setChildFactory(nodeFactory); } @@ -122,6 +122,15 @@ class MessagesPanel extends javax.swing.JPanel implements Lookup.Provider { public Lookup getLookup() { return proxyLookup; } + + /** + * Return the explorerManager for the table. + * + * @return The explorer manager for the table. 
+ */ + ExplorerManager getExplorerManager() { + return outlineViewPanel.getExplorerManager(); + } @Override public void addNotify() { diff --git a/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryPanelWorker.java b/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryPanelWorker.java index bc6157f4cf..e18f3883d9 100755 --- a/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryPanelWorker.java +++ b/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryPanelWorker.java @@ -21,10 +21,12 @@ package org.sleuthkit.autopsy.communications.relationships; import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.Optional; import java.util.logging.Level; import javax.swing.SwingWorker; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoAccount; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; import org.sleuthkit.autopsy.centralrepository.datamodel.Persona; import org.sleuthkit.autopsy.centralrepository.datamodel.PersonaAccount; @@ -61,8 +63,10 @@ class SummaryPanelWorker extends SwingWorker stringList = new ArrayList<>(); List accountFileInstanceList = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().getAccountFileInstances(account); - for (AccountFileInstance instance : accountFileInstanceList) { - stringList.add(instance.getFile().getUniquePath()); + if (accountFileInstanceList != null) { + for (AccountFileInstance instance : accountFileInstanceList) { + stringList.add(instance.getFile().getUniquePath()); + } } List personaList = new ArrayList<>(); @@ -74,12 +78,15 @@ class SummaryPanelWorker extends SwingWorker optCrAccountType = CentralRepository.getInstance().getAccountTypeByName(account.getAccountType().getTypeName()); + if (optCrAccountType.isPresent()) { + try { + crAccount = 
CentralRepository.getInstance().getAccount(optCrAccountType.get(), account.getTypeSpecificID()); + } catch (InvalidAccountIDException unused) { + // This was probably caused to a phone number not making + // threw the normalization. + logger.log(Level.WARNING, String.format("Exception thrown from CR getAccount for account %s (%d)", account.getTypeSpecificID(), account.getAccountID())); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryViewer.java b/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryViewer.java index ae263c08b2..8ba0e957d3 100755 --- a/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryViewer.java +++ b/Core/src/org/sleuthkit/autopsy/communications/relationships/SummaryViewer.java @@ -216,9 +216,11 @@ public class SummaryViewer extends javax.swing.JPanel implements RelationshipsVi List fileRefList = results.getPaths(); - fileRefList.forEach(value -> { - fileRefListModel.addElement(value); - }); + if (fileRefList != null) { + fileRefList.forEach(value -> { + fileRefListModel.addElement(value); + }); + } CardLayout cardLayout = (CardLayout) fileRefPane.getLayout(); cardLayout.show(fileRefPane, "listPanelCard"); diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java index a12675994e..5a4725a8d7 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/AnnotationsContentViewer.java @@ -61,7 +61,7 @@ import org.jsoup.nodes.Element; * Annotations view of file contents. 
*/ @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives -@ServiceProvider(service = DataContentViewer.class, position = 8) +@ServiceProvider(service = DataContentViewer.class, position = 9) @Messages({ "AnnotationsContentViewer.title=Annotations", "AnnotationsContentViewer.toolTip=Displays tags and comments associated with the selected content.", diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java index d8cfda7697..06b0a42fb7 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018-2020 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.contentviewers; +import com.google.common.collect.Lists; import java.awt.EventQueue; import java.awt.event.ActionEvent; import java.awt.image.BufferedImage; @@ -32,7 +33,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import static java.util.Objects.nonNull; -import java.util.SortedSet; import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; @@ -70,7 +70,6 @@ import javax.swing.SwingWorker; import org.apache.commons.io.FilenameUtils; import org.controlsfx.control.MaskerPane; import org.openide.util.NbBundle; -import org.python.google.common.collect.Lists; import org.sleuthkit.autopsy.actions.GetTagNameAndCommentDialog; import org.sleuthkit.autopsy.actions.GetTagNameAndCommentDialog.TagNameAndComment; import org.sleuthkit.autopsy.casemodule.Case; @@ -110,10 +109,6 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan private static final 
long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(MediaViewImagePanel.class.getName()); - private static final SortedSet supportedMimes = ImageUtils.getSupportedImageMimeTypes(); - private static final List supportedExtensions = ImageUtils.getSupportedImageExtensions().stream() - .map("."::concat) //NOI18N - .collect(Collectors.toList()); private static final double[] ZOOM_STEPS = { 0.0625, 0.125, 0.25, 0.375, 0.5, 0.75, 1, 1.5, 2, 2.5, 3, 4, 5, 6, 8, 10}; @@ -634,30 +629,25 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan } /** - * @return supported mime types + * Gets the list of supported MIME types. + * + * @return A list of the supported MIME types as Strings. */ @Override final public List getSupportedMimeTypes() { - return Collections.unmodifiableList(Lists.newArrayList(supportedMimes)); + return Collections.unmodifiableList(Lists.newArrayList(ImageUtils.getSupportedImageMimeTypes())); } /** - * returns supported extensions (each starting with .) + * Returns supported extensions (each starting with .) * - * @return + * @return A unmodifiable list of image extensions as Strings. */ @Override final public List getSupportedExtensions() { - return getExtensions(); - } - - /** - * returns supported extensions (each starting with .) 
- * - * @return - */ - final public List getExtensions() { - return Collections.unmodifiableList(supportedExtensions); + return ImageUtils.getSupportedImageExtensions().stream() + .map("."::concat) //NOI18N + .collect(Collectors.toList()); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java index 2bd3d90875..b17263a26d 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java @@ -32,6 +32,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.logging.Level; @@ -630,11 +631,13 @@ public class ContactArtifactViewer extends javax.swing.JPanel implements Artifac // make a list of all unique accounts for this contact if (!account.getAccountType().equals(Account.Type.DEVICE)) { - CentralRepoAccount.CentralRepoAccountType crAccountType = CentralRepository.getInstance().getAccountTypeByName(account.getAccountType().getTypeName()); - CentralRepoAccount crAccount = CentralRepository.getInstance().getAccount(crAccountType, account.getTypeSpecificID()); + Optional optCrAccountType = CentralRepository.getInstance().getAccountTypeByName(account.getAccountType().getTypeName()); + if (optCrAccountType.isPresent()) { + CentralRepoAccount crAccount = CentralRepository.getInstance().getAccount(optCrAccountType.get(), account.getTypeSpecificID()); - if (crAccount != null && uniqueAccountsList.contains(crAccount) == false) { - uniqueAccountsList.add(crAccount); + if (crAccount != null && uniqueAccountsList.contains(crAccount) == false) { + uniqueAccountsList.add(crAccount); + } } } diff --git 
a/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java index c223f577b0..bfa0cfc3e9 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/contextviewer/ContextViewer.java @@ -48,7 +48,7 @@ import org.sleuthkit.datamodel.TskCoreException; * usage, if known. * */ -@ServiceProvider(service = DataContentViewer.class, position = 7) +@ServiceProvider(service = DataContentViewer.class, position = 8) public final class ContextViewer extends javax.swing.JPanel implements DataContentViewer { private static final long serialVersionUID = 1L; diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/Bundle.properties-MERGED index 33ac7b64d3..d74307642c 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/Bundle.properties-MERGED @@ -1,3 +1,4 @@ +OsAccountDataPanel_administrator_title=Administrator OsAccountDataPanel_basic_address=Address OsAccountDataPanel_basic_admin=Administrator OsAccountDataPanel_basic_creationDate=Creation Date @@ -5,11 +6,16 @@ OsAccountDataPanel_basic_fullname=Full Name OsAccountDataPanel_basic_login=Login OsAccountDataPanel_basic_title=Basic Properties OsAccountDataPanel_basic_type=Type +OsAccountDataPanel_data_accessed_title=Last Login +OsAccountDataPanel_host_count_title=Login Count +# {0} - hostName +OsAccountDataPanel_host_section_title={0} Details OsAccountDataPanel_realm_address=Address OsAccountDataPanel_realm_confidence=Confidence OsAccountDataPanel_realm_name=Name +OsAccountDataPanel_realm_scope=Scope OsAccountDataPanel_realm_title=Realm Properties OsAccountDataPanel_realm_unknown=Unknown -OsAccountViewer_title=Os Account -OsAccountViewer_tooltip=Viewer for 
OS accounts related to the selected node. +OsAccountViewer_title=OS Account +OsAccountViewer_tooltip=Viewer for Operating System accounts related to the selected node. TableDataPanel.titleLabel.text=Title diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountDataPanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountDataPanel.java old mode 100755 new mode 100644 index e478cc27a3..2c88660cb0 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountDataPanel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountDataPanel.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.contentviewers.osaccount; +import java.awt.BorderLayout; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; @@ -25,16 +26,30 @@ import java.awt.Insets; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; +import java.util.HashMap; import java.util.List; import static java.util.Locale.US; +import java.util.Map; import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.logging.Level; +import java.util.logging.Logger; import javax.swing.Box; import javax.swing.JLabel; import javax.swing.JPanel; +import javax.swing.SwingWorker; import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.contentviewers.osaccount.SectionData.RowData; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.OsAccount; +import org.sleuthkit.datamodel.OsAccount.OsAccountAttribute; +import org.sleuthkit.datamodel.OsAccountInstance; +import org.sleuthkit.datamodel.OsAccountManager; import org.sleuthkit.datamodel.OsAccountRealm; +import org.sleuthkit.datamodel.SleuthkitCase; /** * Panel for displaying the properties of an OsAccount. 
@@ -42,12 +57,15 @@ import org.sleuthkit.datamodel.OsAccountRealm; public class OsAccountDataPanel extends JPanel { private static final long serialVersionUID = 1L; + final private static Logger logger = Logger.getLogger(OsAccountDataPanel.class.getName()); private static final int KEY_COLUMN = 0; private static final int VALUE_COLUMN = 1; - + private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("MMM dd yyyy", US); + private PanelDataFetcher dataFetcher = null; + // Panel constructor. OsAccountDataPanel() { initialize(); @@ -66,21 +84,38 @@ public class OsAccountDataPanel extends JPanel { * @param account OsAccount to display, if null is passed the panel will * appear blank. */ - void setOsAccount(OsAccount account) { + void setOsAccountId(Long osAccountId) { removeAll(); + revalidate(); - if (account != null) { - List data = new ArrayList<>(); - data.add(buildBasicProperties(account)); + if (osAccountId != null) { + setLayout(new BorderLayout()); + add(new JLabel("Loading OsAccount Data..."), BorderLayout.NORTH); - OsAccountRealm realm = account.getRealm(); - if (realm != null) { - data.add(buildRealmProperties(realm)); + if (dataFetcher != null && !dataFetcher.isDone()) { + dataFetcher.cancel(true); } - addDataComponents(data); + dataFetcher = new PanelDataFetcher(osAccountId); + dataFetcher.execute(); } + } + + void setOsAccount(OsAccount account) { + removeAll(); revalidate(); + + if (account != null) { + setLayout(new BorderLayout()); + add(new JLabel("Loading OsAccount Data..."), BorderLayout.NORTH); + + if (dataFetcher != null && !dataFetcher.isDone()) { + dataFetcher.cancel(true); + } + + dataFetcher = new PanelDataFetcher(account); + dataFetcher.execute(); + } } /** @@ -122,8 +157,7 @@ public class OsAccountDataPanel extends JPanel { "OsAccountDataPanel_basic_address=Address", "OsAccountDataPanel_basic_admin=Administrator", "OsAccountDataPanel_basic_type=Type", - "OsAccountDataPanel_basic_creationDate=Creation Date", - }) + 
"OsAccountDataPanel_basic_creationDate=Creation Date",}) /** * Returns the data for the Basic Properties section of the panel. @@ -146,13 +180,14 @@ public class OsAccountDataPanel extends JPanel { data.addData(Bundle.OsAccountDataPanel_basic_address(), account.getName() == null || account.getName().isEmpty() ? "" : account.getName()); - data.addData(Bundle.OsAccountDataPanel_basic_type(), account.getOsAccountType().getName()); + data.addData(Bundle.OsAccountDataPanel_basic_type(), + account.getOsAccountType().isPresent() ? account.getOsAccountType().get().getName() : ""); + Optional crTime = account.getCreationTime(); - if(crTime.isPresent()) { + if (crTime.isPresent()) { data.addData(Bundle.OsAccountDataPanel_basic_creationDate(), DATE_FORMAT.format(new Date(crTime.get() * 1000))); - } - else { + } else { data.addData(Bundle.OsAccountDataPanel_basic_creationDate(), ""); } @@ -177,23 +212,54 @@ public class OsAccountDataPanel extends JPanel { private SectionData buildRealmProperties(OsAccountRealm realm) { SectionData data = new SectionData(Bundle.OsAccountDataPanel_realm_title()); - Optional optional = realm.getRealmName(); - data.addData(Bundle.OsAccountDataPanel_realm_name(), - optional.isPresent() ? optional.get() : Bundle.OsAccountDataPanel_realm_unknown()); + String realmName = realm.getRealmNames().isEmpty() ? Bundle.OsAccountDataPanel_realm_unknown() : realm.getRealmNames().get(0); + data.addData(Bundle.OsAccountDataPanel_realm_name(), realmName); - optional = realm.getRealmAddr(); + Optional optional = realm.getRealmAddr(); data.addData(Bundle.OsAccountDataPanel_realm_address(), optional.isPresent() ? 
optional.get() : ""); - + data.addData(Bundle.OsAccountDataPanel_realm_scope(), - realm.getScope().getName()); - + realm.getScope().getName()); + data.addData(Bundle.OsAccountDataPanel_realm_confidence(), realm.getScopeConfidence().getName()); return data; } + @Messages({ + "# {0} - hostName", + "OsAccountDataPanel_host_section_title={0} Details", + "OsAccountDataPanel_host_count_title=Login Count", + "OsAccountDataPanel_data_accessed_title=Last Login", + "OsAccountDataPanel_administrator_title=Administrator" + }) + private SectionData buildHostData(Host host, List attributeList) { + SectionData data = new SectionData(Bundle.OsAccountDataPanel_host_section_title(host.getName())); + for (OsAccountAttribute attribute : attributeList) { + String displayName = attribute.getAttributeType().getDisplayName(); + String value = attribute.getDisplayString(); + + if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT.getTypeID()) { + displayName = Bundle.OsAccountDataPanel_host_count_title(); + } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IS_ADMIN.getTypeID()) { + displayName = Bundle.OsAccountDataPanel_administrator_title(); + if(attribute.getValueInt() == 0) { + value = "False"; + } else { + value = "True"; + } + } else if(attribute.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) { + displayName = Bundle.OsAccountDataPanel_data_accessed_title(); + } + + data.addData(displayName, value); + } + + return data; + } + /** * Add a section title to the panel with the given title and location. * @@ -214,7 +280,7 @@ public class OsAccountDataPanel extends JPanel { * @param row The row in the layout. 
*/ private void addPropertyName(String key, int row) { - JLabel label = new JLabel(key); + JLabel label = new JLabel(key + ":"); add(label, getPropertyNameContraints(row)); } @@ -291,4 +357,189 @@ public class OsAccountDataPanel extends JPanel { return constraints; } + + /** + * A SwingWorker to gather the data for the content panel. + */ + private class PanelDataFetcher extends SwingWorker { + + private final Long accountId; + private OsAccount account; + + /** + * Construct a new worker for the given account. + * + * @param account + */ + PanelDataFetcher(Long accountId) { + this.accountId = accountId; + this.account = null; + } + + PanelDataFetcher(OsAccount account) { + this.account = account; + this.accountId = null; + } + + @Override + protected WorkerResults doInBackground() throws Exception { + Map> hostMap = new HashMap<>(); + Map instanceMap = new HashMap<>(); + SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase(); + OsAccountManager osAccountManager = skCase.getOsAccountManager(); + + if(account == null) { + account = osAccountManager.getOsAccountByObjectId(accountId); + } + + OsAccountRealm realm = skCase.getOsAccountRealmManager().getRealmByRealmId(account.getRealmId()); + + List hosts = osAccountManager.getHosts(account); + List attributeList = account.getExtendedOsAccountAttributes(); + + if (attributeList != null) { + if (hosts != null) { + // Organize the attributes by hostId + Map> idMap = new HashMap<>(); + for (OsAccountAttribute attribute : attributeList) { + List atList = null; + Optional optionalId = attribute.getHostId(); + Long key = null; + if (optionalId.isPresent()) { + key = optionalId.get(); + } + + atList = idMap.get(key); + + if (atList == null) { + atList = new ArrayList<>(); + idMap.put(key, atList); + } + + atList.add(attribute); + } + + // Add attribute lists to the hostMap + for (Host host : hosts) { + List atList = idMap.get(host.getHostId()); + if (atList != null) { + hostMap.put(host, atList); + } + + } + List 
atList = idMap.get(null); + if (atList != null) { + hostMap.put(null, atList); + } + + // Store both the host and the dataSource so that we get + // all of the calls to the db done in the thread. + for (OsAccountInstance instance : account.getOsAccountInstances()) { + instanceMap.put(instance.getDataSource().getHost(), instance.getDataSource()); + } + + } else { + hostMap.put(null, attributeList); + } + } + + return new WorkerResults(hostMap, instanceMap, realm); + } + + @Override + protected void done() { + WorkerResults results = null; + + try { + if (this.isCancelled()) { + return; + } else { + results = get(); + } + } catch (ExecutionException | InterruptedException ex) { + logger.log(Level.SEVERE, String.format("Failed to retrieve data for OsAccount (%d)", account.getId()), ex); + } + + if (results != null) { + removeAll(); + setLayout(new GridBagLayout()); + + List data = new ArrayList<>(); + data.add(buildBasicProperties(account)); + Map> hostDataMap = results.getAttributeMap(); + if (hostDataMap != null && !hostDataMap.isEmpty()) { + hostDataMap.forEach((K, V) -> data.add(buildHostData(K, V))); + } + + OsAccountRealm realm = results.getRealm(); + if (realm != null) { + data.add(buildRealmProperties(realm)); + } + +// Removing the instance section for now. Leaving code here for +// future use. +// Map instanceMap = results.getDataSourceMap(); +// if (!instanceMap.isEmpty()) { +// SectionData instanceSection = new SectionData("Instances"); +// instanceMap.forEach((K, V) -> instanceSection.addData(K.getName(), V.getName())); +// +// data.add(instanceSection); +// } + + addDataComponents(data); + + revalidate(); + repaint(); + } + } + } + + /** + * Helper class for PanelDataFetcher that wraps the returned data needed for + * the panel. + */ + private final class WorkerResults { + + private final Map> attributeMap; + private final Map instanceMap; + private final OsAccountRealm realm; + + /** + * Construct a new WorkerResult object. 
+ * + * @param attributeMap Maps the OsAccountAttributes to the host they + * belong with. + * @param instanceMap A map of data to display OsAccount instance + * information. + */ + WorkerResults(Map> attributeMap, Map instanceMap, OsAccountRealm realm) { + this.attributeMap = attributeMap; + this.instanceMap = instanceMap; + this.realm = realm; + } + + /** + * Returns a map of OsAccountAttributes that belong to a specific Host. + * There maybe a null key in the map which represents properties that + * are not host specific. + * + * @return OsAccountAttribute map. + */ + Map> getAttributeMap() { + return attributeMap; + } + + /** + * A map of the instance data for the OsAccount. + * + * @return + */ + Map getDataSourceMap() { + return instanceMap; + } + + OsAccountRealm getRealm() { + return realm; + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.form b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.form index 910ba27b78..1b6d64b8a1 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.form +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.form @@ -11,14 +11,20 @@ + - + + + + + + - - + + diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.java index ccedd942bb..2195e4fbdb 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/OsAccountViewer.java @@ -18,7 +18,10 @@ */ package org.sleuthkit.autopsy.contentviewers.osaccount; +import java.awt.BorderLayout; import java.awt.Component; +import java.beans.PropertyChangeEvent; +import java.beans.PropertyChangeListener; import java.util.Optional; import java.util.logging.Level; import org.openide.nodes.Node; @@ -34,7 +37,7 @@ import org.sleuthkit.datamodel.TskCoreException; /** * 
DataContentViewer for OsAccounts. */ -@ServiceProvider(service = DataContentViewer.class, position = 12) +@ServiceProvider(service = DataContentViewer.class, position = 7) public class OsAccountViewer extends javax.swing.JPanel implements DataContentViewer { private static final long serialVersionUID = 1L; @@ -53,26 +56,29 @@ public class OsAccountViewer extends javax.swing.JPanel implements DataContentVi @Override public void setNode(Node node) { - OsAccount osAccount = null; + Long osAccountId = null; try { - osAccount = node.getLookup().lookup(OsAccount.class); - if (osAccount == null) { - Optional optional; - AbstractFile file = node.getLookup().lookup(AbstractFile.class); - if (file != null) { - optional = file.getOsAccount(); - if (optional.isPresent()) { - osAccount = optional.get(); - } + OsAccount osAccount = node.getLookup().lookup(OsAccount.class); + if (osAccount != null) { + dataPanel.setOsAccount(osAccount); + return; + } + + Optional optional; + AbstractFile file = node.getLookup().lookup(AbstractFile.class); + if (file != null) { + optional = file.getOsAccountObjectId(); + if (optional.isPresent()) { + osAccountId = optional.get(); } } - if (osAccount == null) { + if (osAccountId == null) { DataArtifact dataArtifact = node.getLookup().lookup(DataArtifact.class); if (dataArtifact != null) { - Optional optional = dataArtifact.getOsAccount(); + optional = dataArtifact.getOsAccountObjectId(); if (optional.isPresent()) { - osAccount = optional.get(); + osAccountId = optional.get(); } } @@ -81,13 +87,13 @@ public class OsAccountViewer extends javax.swing.JPanel implements DataContentVi logger.log(Level.SEVERE, String.format("Failed to get OsAccount for node %s", node.getDisplayName()), ex); } - if (osAccount != null) { - dataPanel.setOsAccount(osAccount); + if (osAccountId != null) { + dataPanel.setOsAccountId(osAccountId); } } @Messages({ - "OsAccountViewer_title=Os Account" + "OsAccountViewer_title=OS Account" }) @Override public String getTitle() { @@ 
-95,7 +101,7 @@ public class OsAccountViewer extends javax.swing.JPanel implements DataContentVi } @Messages({ - "OsAccountViewer_tooltip=Viewer for OS accounts related to the selected node." + "OsAccountViewer_tooltip=Viewer for Operating System accounts related to the selected node." }) @Override public String getToolTip() { @@ -125,8 +131,8 @@ public class OsAccountViewer extends javax.swing.JPanel implements DataContentVi try { return osAccount != null - || (file != null && file.getOsAccount().isPresent()) - || (dataArtifact != null && dataArtifact.getOsAccount().isPresent()); + || (file != null && file.getOsAccountObjectId().isPresent()) + || (dataArtifact != null && dataArtifact.getOsAccountObjectId().isPresent()); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to determine if node %s is Supported for OsAccountViewer", node.getDisplayName()), ex); return false; @@ -150,8 +156,14 @@ public class OsAccountViewer extends javax.swing.JPanel implements DataContentVi mainScrollPane = new javax.swing.JScrollPane(); - setLayout(new java.awt.BorderLayout()); - add(mainScrollPane, java.awt.BorderLayout.CENTER); + setLayout(new java.awt.GridBagLayout()); + + mainScrollPane.setPreferredSize(new java.awt.Dimension(200, 0)); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + add(mainScrollPane, gridBagConstraints); }// //GEN-END:initComponents diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/SectionData.java b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/SectionData.java index 4674ff0756..afb60251fa 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/SectionData.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/osaccount/SectionData.java @@ -51,10 +51,10 @@ final class SectionData implements Iterable> } /** - * Add a new property name\property 
value pair. + * Add a new property name/property value pair. * - * @param key The property display name. - * @param value The property value. + * @param properytName The property display name. + * @param propertyValue The property value. */ void addData(String properytName, String propertyValue) { data.add(new RowData<>(properytName, propertyValue)); diff --git a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties index b106a1a123..0f6042fc60 100644 --- a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties @@ -27,3 +27,7 @@ ServicesMonitor.remoteKeywordSearch.displayName.text=Multi-user keyword search s ServicesMonitor.messaging.displayName.text=Messaging service ServicesMonitor.databaseConnectionInfo.error.msg=Error accessing case database connection info ServicesMonitor.messagingService.connErr.text=Error accessing messaging service connection info +Actions/Case=Case +Menu/Case=Case +Toolbars/Case=Case +Menu/Case/OpenRecentCase=Open Recent Case \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED index cd9d1002cd..1d50092e80 100755 --- a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED @@ -31,3 +31,7 @@ ServicesMonitor.remoteKeywordSearch.displayName.text=Multi-user keyword search s ServicesMonitor.messaging.displayName.text=Messaging service ServicesMonitor.databaseConnectionInfo.error.msg=Error accessing case database connection info ServicesMonitor.messagingService.connErr.text=Error accessing messaging service connection info +Actions/Case=Case +Menu/Case=Case +Toolbars/Case=Case +Menu/Case/OpenRecentCase=Open Recent Case diff --git a/Core/src/org/sleuthkit/autopsy/core/Installer.java b/Core/src/org/sleuthkit/autopsy/core/Installer.java index 
0612729b61..5bd649e8cb 100644 --- a/Core/src/org/sleuthkit/autopsy/core/Installer.java +++ b/Core/src/org/sleuthkit/autopsy/core/Installer.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -50,6 +50,7 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.python.JythonModuleLoader; +import org.sleuthkit.autopsy.texttranslation.TextTranslationService; /** * Wrapper over Installers in packages in Core module. This is the main @@ -369,6 +370,7 @@ public class Installer extends ModuleInstall { } logger.log(Level.INFO, "Autopsy Core restore completed"); //NON-NLS preloadJython(); + preloadTranslationServices(); } /** @@ -376,7 +378,7 @@ public class Installer extends ModuleInstall { * because we encountered issues related to file locking when initialization * was performed closer to where the bindings are used. See JIRA-6528. */ - private void initializeSevenZip() { + private static void initializeSevenZip() { try { SevenZip.initSevenZipFromPlatformJAR(); logger.log(Level.INFO, "7zip-java bindings loaded"); //NON-NLS @@ -388,7 +390,7 @@ public class Installer extends ModuleInstall { /** * Runs an initial load of the Jython modules to speed up subsequent loads. */ - private void preloadJython() { + private static void preloadJython() { Runnable loader = () -> { try { JythonModuleLoader.getIngestModuleFactories(); @@ -402,6 +404,22 @@ public class Installer extends ModuleInstall { }; new Thread(loader).start(); } + + /** + * Runs an initial load of the translation services to speed up subsequent loads. 
+ */ + private static void preloadTranslationServices() { + Runnable loader = () -> { + try { + TextTranslationService.getInstance(); + } catch (Exception ex) { + // This is a firewall exception to ensure that any possible exception caused + // by this initial load of the translation modules are caught and logged. + logger.log(Level.SEVERE, "There was an error while doing an initial load of translation services.", ex); + } + }; + new Thread(loader).start(); + } @Override public void validate() throws IllegalStateException { diff --git a/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java b/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java index 537c25c827..85b7b35cbc 100644 --- a/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java +++ b/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java @@ -18,12 +18,14 @@ */ package org.sleuthkit.autopsy.core; +import java.io.File; import java.nio.file.Paths; import org.sleuthkit.autopsy.coreutils.TextConverter; import java.util.prefs.BackingStoreException; import org.sleuthkit.autopsy.events.MessageServiceConnectionInfo; import java.util.prefs.PreferenceChangeListener; import java.util.prefs.Preferences; +import org.apache.commons.lang3.StringUtils; import org.openide.util.NbPreferences; import org.python.icu.util.TimeZone; import org.sleuthkit.autopsy.machinesettings.UserMachinePreferences; @@ -93,7 +95,8 @@ public final class UserPreferences { private static final String GEO_OSM_SERVER_ADDRESS = "GeolocationOsmServerAddress"; private static final String GEO_MBTILES_FILE_PATH = "GeolcoationMBTilesFilePath"; private static final String HEALTH_MONITOR_REPORT_PATH = "HealthMonitorReportPath"; - + private static final String TEMP_FOLDER = "Temp"; + // Prevent instantiation. 
private UserPreferences() { } @@ -348,27 +351,27 @@ public final class UserPreferences { public static void setIndexingServerPort(int port) { preferences.putInt(SOLR8_SERVER_PORT, port); } - + public static String getSolr4ServerHost() { return preferences.get(SOLR4_SERVER_HOST, ""); } public static void setSolr4ServerHost(String hostName) { preferences.put(SOLR4_SERVER_HOST, hostName); - } - + } + public static String getSolr4ServerPort() { return preferences.get(SOLR4_SERVER_PORT, ""); } public static void setSolr4ServerPort(String port) { preferences.put(SOLR4_SERVER_PORT, port); - } - + } + public static String getZkServerHost() { return preferences.get(ZK_SERVER_HOST, ""); } - + public static void setZkServerHost(String hostName) { preferences.put(ZK_SERVER_HOST, hostName); } @@ -380,7 +383,7 @@ public final class UserPreferences { public static void setZkServerPort(String port) { preferences.put(ZK_SERVER_PORT, port); } - + public static void setTextTranslatorName(String textTranslatorName) { preferences.put(TEXT_TRANSLATOR_NAME, textTranslatorName); } @@ -388,14 +391,14 @@ public final class UserPreferences { public static String getTextTranslatorName() { return preferences.get(TEXT_TRANSLATOR_NAME, null); } - + public static void setUseOcrInTranslation(boolean enableOcr) { preferences.putBoolean(OCR_TRANSLATION_ENABLED, enableOcr); } public static boolean getUseOcrInTranslation() { return preferences.getBoolean(OCR_TRANSLATION_ENABLED, true); - } + } /** * Persists message service connection info. @@ -536,10 +539,11 @@ public final class UserPreferences { } /** - * Get the maximum JVM heap size (in MB) for the embedded Solr server. The returned value - * depends on the platform (64bit vs 32bit). + * Get the maximum JVM heap size (in MB) for the embedded Solr server. The + * returned value depends on the platform (64bit vs 32bit). 
* - * @return Saved value or default (2 GB for 64bit platforms, 512MB for 32bit) + * @return Saved value or default (2 GB for 64bit platforms, 512MB for + * 32bit) */ public static int getMaxSolrVMSize() { if (PlatformUtil.is64BitJVM()) { @@ -594,20 +598,21 @@ public final class UserPreferences { public static String getExternalHexEditorPath() { return preferences.get(EXTERNAL_HEX_EDITOR_PATH, Paths.get("C:", "Program Files", "HxD", "HxD.exe").toString()); } - + /** * Set the geolocation tile server option. - * - * @param option + * + * @param option */ public static void setGeolocationTileOption(int option) { preferences.putInt(GEO_TILE_OPTION, option); } /** - * Retrieves the Geolocation tile option. If not found, the value will + * Retrieves the Geolocation tile option. If not found, the value will * default to 0. - * @return + * + * @return */ public static int getGeolocationtTileOption() { return preferences.getInt(GEO_TILE_OPTION, 0); @@ -615,8 +620,8 @@ public final class UserPreferences { /** * Sets the path to the OSM tile zip file. - * - * @param absolutePath + * + * @param absolutePath */ public static void setGeolocationOsmZipPath(String absolutePath) { preferences.put(GEO_OSM_TILE_ZIP_PATH, absolutePath); @@ -625,7 +630,7 @@ public final class UserPreferences { /** * Retrieves the path for the OSM tile zip file or returns empty string if * none was found. - * + * * @return Path to zip file */ public static String getGeolocationOsmZipPath() { @@ -633,9 +638,10 @@ public final class UserPreferences { } /** - * Sets the address of geolocation window user defined OSM server data source. - * - * @param address + * Sets the address of geolocation window user defined OSM server data + * source. 
+ * + * @param address */ public static void setGeolocationOsmServerAddress(String address) { preferences.put(GEO_OSM_SERVER_ADDRESS, address); @@ -643,41 +649,72 @@ public final class UserPreferences { /** * Retrieves the address to the OSM server or null if one was not found. - * + * * @return Address of OSM server */ public static String getGeolocationOsmServerAddress() { return preferences.get(GEO_OSM_SERVER_ADDRESS, ""); } - + /** * Sets the path for Geolocation MBTiles data source file. - * - * @param absolutePath + * + * @param absolutePath */ public static void setGeolocationMBTilesFilePath(String absolutePath) { preferences.put(GEO_MBTILES_FILE_PATH, absolutePath); } - + /** * Retrieves the path for the Geolocation MBTiles data source file. - * - * @return Absolute path to MBTiles file or empty string if none was found. + * + * @return Absolute path to MBTiles file or empty string if none was found. */ public static String getGeolocationMBTilesFilePath() { return preferences.get(GEO_MBTILES_FILE_PATH, ""); } - + /** - * Retrieves the root application temp directory. - * + * @return A subdirectory of java.io.tmpdir. + */ + private static File getSystemTempDirFile() { + return Paths.get(System.getProperty("java.io.tmpdir"), getAppName(), TEMP_FOLDER).toFile(); + } + + /** + * Retrieves the application temp directory and ensures the directory + * exists. + * * @return The absolute path to the application temp directory. */ public static String getAppTempDirectory() { - return Paths.get(UserMachinePreferences.getBaseTempDirectory(), getAppName()) - .toAbsolutePath().toString(); + // NOTE: If this code changes, Case.getTempDirectory() should likely be checked + // as well. See JIRA 7505 for more information. + File appTempDir = null; + switch (UserMachinePreferences.getTempDirChoice()) { + case CUSTOM: + String customDirectory = UserMachinePreferences.getCustomTempDirectory(); + appTempDir = (StringUtils.isBlank(customDirectory)) + ? 
null + : Paths.get(customDirectory, getAppName(), TEMP_FOLDER).toFile(); + break; + case SYSTEM: + default: + // at this level, if the case directory is specified for a temp + // directory, return the system temp directory instead. + appTempDir = getSystemTempDirFile(); + break; + } + + appTempDir = appTempDir == null ? getSystemTempDirFile() : appTempDir; + + if (!appTempDir.exists()) { + appTempDir.mkdirs(); + } + + return appTempDir.getAbsolutePath(); } - + /** * Set the last used health monitor report path. * @@ -690,9 +727,10 @@ public final class UserPreferences { /** * Gets the last used health monitor report path. * - * @return Last used health monitor report path. Empty string if no value has been recorded. + * @return Last used health monitor report path. Empty string if no value + * has been recorded. */ public static String getHealthMonitorReportPath() { return preferences.get(HEALTH_MONITOR_REPORT_PATH, ""); } -} \ No newline at end of file +} diff --git a/Core/src/org/sleuthkit/autopsy/core/layer.xml b/Core/src/org/sleuthkit/autopsy/core/layer.xml index 923e8f5c5e..3a8865f643 100644 --- a/Core/src/org/sleuthkit/autopsy/core/layer.xml +++ b/Core/src/org/sleuthkit/autopsy/core/layer.xml @@ -40,6 +40,7 @@ ====================================================== --> + @@ -140,13 +141,14 @@ + - + + - @@ -248,18 +250,10 @@ - - - - - - - - @@ -281,7 +275,6 @@ - @@ -378,6 +371,7 @@ + - @@ -379,8 +370,9 @@ - - + + + diff --git a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ileap-artifact-attribute-reference.xml b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ileap-artifact-attribute-reference.xml index 5799161685..70725fd0c6 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ileap-artifact-attribute-reference.xml +++ b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ileap-artifact-attribute-reference.xml @@ -50,9 +50,9 @@ - + - + @@ -72,7 +72,7 @@ - + @@ -101,6 +101,7 @@ + @@ -237,8 +238,8 @@ - - + + @@ -252,17 +253,15 @@ - 
- - - + + + + - - @@ -314,9 +313,9 @@ - - - + + + @@ -357,7 +356,7 @@ - + @@ -377,7 +376,7 @@ - + @@ -495,13 +494,6 @@ - - - - - - - @@ -524,7 +516,7 @@ - + @@ -543,14 +535,12 @@ - + - - @@ -714,13 +704,14 @@ - + - + + - + @@ -741,28 +732,26 @@ - + - + - + - diff --git a/Core/src/org/sleuthkit/autopsy/rejview/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/rejview/Bundle.properties-MERGED index c633b9838e..84a54f1640 100644 --- a/Core/src/org/sleuthkit/autopsy/rejview/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/rejview/Bundle.properties-MERGED @@ -15,6 +15,7 @@ RejTreeKeyView.columns.type=Type RejTreeKeyView.columns.value=Value RejTreeKeyView.failedToParse.keyName=FAILED TO PARSE KEY NAME RejTreeKeyView.metadataBorder.title=Metadata +RejTreeKeyView.template.dateTime=Modification Time: RejTreeKeyView.template.name=Name: RejTreeKeyView.template.numberOfSubkeys=Number of subkeys: RejTreeKeyView.template.numberOfValues=Number of values: diff --git a/Core/src/org/sleuthkit/autopsy/rejview/RejTreeKeyView.java b/Core/src/org/sleuthkit/autopsy/rejview/RejTreeKeyView.java index a74d4d26b4..6d2d55ba1a 100644 --- a/Core/src/org/sleuthkit/autopsy/rejview/RejTreeKeyView.java +++ b/Core/src/org/sleuthkit/autopsy/rejview/RejTreeKeyView.java @@ -28,6 +28,8 @@ import java.awt.Dimension; import javax.swing.table.TableColumn; import javax.swing.table.TableColumnModel; import java.io.UnsupportedEncodingException; +import java.text.SimpleDateFormat; +import java.util.Date; import java.util.Iterator; import java.util.logging.Level; import javax.swing.BorderFactory; @@ -54,7 +56,8 @@ public final class RejTreeKeyView extends RejTreeNodeView { "RejTreeKeyView.valuesBorder.title=Values", "RejTreeKeyView.template.name=Name:", "RejTreeKeyView.template.numberOfSubkeys=Number of subkeys:", - "RejTreeKeyView.template.numberOfValues=Number of values:"}) + "RejTreeKeyView.template.numberOfValues=Number of values:", + "RejTreeKeyView.template.dateTime=Modification 
Time:"}) public RejTreeKeyView(RejTreeKeyNode node) { super(new BorderLayout()); @@ -62,6 +65,7 @@ public final class RejTreeKeyView extends RejTreeNodeView { * param 1 Name * param 2 Number of subkeys * param 3 Number of values + * param 4 Date/time */ String metadataTemplate = "" + Bundle.RejTreeKeyView_template_name() @@ -69,10 +73,13 @@ public final class RejTreeKeyView extends RejTreeNodeView { + Bundle.RejTreeKeyView_template_numberOfSubkeys() + " %2$d
" + Bundle.RejTreeKeyView_template_numberOfValues() - + " %3$d
"; + + " %3$d
" + + Bundle.RejTreeKeyView_template_dateTime() + + " %4$s
"; String keyName; int numSubkeys; int numValues; + String dateTime; try { keyName = node.getKey().getName(); @@ -95,7 +102,12 @@ public final class RejTreeKeyView extends RejTreeNodeView { numValues = -1; } - JLabel metadataLabel = new JLabel(String.format(metadataTemplate, keyName, numSubkeys, numValues), JLabel.LEFT); + Date date = new java.util.Date(node.getKey().getTimestamp().getTimeInMillis()); + SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss z"); + sdf.setTimeZone(java.util.TimeZone.getTimeZone("GMT+0")); + dateTime = sdf.format(date); + + JLabel metadataLabel = new JLabel(String.format(metadataTemplate, keyName, numSubkeys, numValues, dateTime), JLabel.LEFT); metadataLabel.setBorder(BorderFactory.createTitledBorder(Bundle.RejTreeKeyView_metadataBorder_title())); metadataLabel.setVerticalAlignment(SwingConstants.TOP); diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportBranding.java b/Core/src/org/sleuthkit/autopsy/report/ReportBranding.java index 93b4ee0081..e9ebf4216c 100644 --- a/Core/src/org/sleuthkit/autopsy/report/ReportBranding.java +++ b/Core/src/org/sleuthkit/autopsy/report/ReportBranding.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2013-2014 Basis Technology Corp. + * Copyright 2013-2021 Basis Technology Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,6 +21,8 @@ package org.sleuthkit.autopsy.report; import org.sleuthkit.autopsy.report.infrastructure.ReportGenerator; import java.io.File; import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; import org.openide.util.NbBundle; @@ -37,7 +39,7 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil; public final class ReportBranding implements ReportBrandingProviderI { //property names - public static final String AGENCY_LOGO_PATH_PROP = "AgencyLogoPath"; //NON-NLS + private static final String AGENCY_LOGO_PATH_PROP = "AgencyLogoPath"; //NON-NLS private static final String REPORT_TITLE_PROP = "ReportTitle"; //NON-NLS private static final String REPORT_FOOTER_PROP = "ReportFooter"; //NON-NLS //default settings @@ -46,8 +48,9 @@ public final class ReportBranding implements ReportBrandingProviderI { .getMessage(ReportBranding.class, "ReportBranding.defaultReportTitle.text"); private static final String DEFAULT_REPORT_FOOTER = NbBundle .getMessage(ReportBranding.class, "ReportBranding.defaultReportFooter.text"); - private String reportsBrandingDir; //dir with extracted reports branding resources - public static final String MODULE_NAME = ReportBranding.class.getSimpleName(); + private final String reportsBrandingDir; //dir with extracted reports branding resources + private final Path userConfigDir = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath()); + private static final String MODULE_NAME = ReportBranding.class.getSimpleName(); private static final Logger logger = Logger.getLogger(ReportBranding.class.getName()); // this is static so that it can be set by another object @@ -109,38 +112,73 @@ public final class ReportBranding implements ReportBrandingProviderI { generatorLogoPath = path; } + /** + * Read logo path from preferences file. Reverses the path relativization performed + * in setAgencyLogoPath(). 
If the stored path starts with either “/” or drive letter, + * it is a full path, and is returned to the caller. Otherwise, append current user + * directory to the saved relative path. See JIRA-7348. + * + * @return Full path to the logo file. + */ @Override public String getAgencyLogoPath() { - String curPath = null; /* * The agency logo code uses these properties to persist changes in the * logo (within the same process). This is different from the generator * logo that uses a static variable. */ - curPath = ModuleSettings.getConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP); + String curPath = ModuleSettings.getConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP); + + //if has been set, validate it's correct, if not set, return null - if (curPath != null && new File(curPath).canRead() == false) { - //use default - logger.log(Level.INFO, "Custom report branding for agency logo is not valid: " + curPath); //NON-NLS - curPath = null; + if (curPath != null && !curPath.isEmpty()) { + + // check if the path is an absolute path (starts with either drive letter or "/") + Path driveLetterOrNetwork = Paths.get(curPath).getRoot(); + if (driveLetterOrNetwork != null) { + // absolute path + return curPath; + } + + // Path is a relative path. Reverse path relativization performed in setAgencyLogoPath() + Path absolutePath = userConfigDir.resolve(curPath); + curPath = absolutePath.toString(); + if (new File(curPath).canRead() == false) { + //use default + logger.log(Level.INFO, "Custom report branding for agency logo is not valid: {0}", curPath); //NON-NLS + curPath = null; + } } return curPath; } + /** + * Save logo path. If the path is inside user directory (e.g. + * "C:\Users\USER_NAME\AppData\Roaming\autopsy"), trim that off and save it + * as a relative path (i.e it will not start with a “/” or drive letter). Otherwise, + * full path is saved. See JIRA-7348. + * + * @param fullPath Full path to the logo file. 
+ */ @Override - public void setAgencyLogoPath(String path) { + public void setAgencyLogoPath(String fullPath) { + + Path relativePath = Paths.get(fullPath); + // check if the path is within user directory + if (Paths.get(fullPath).startsWith(userConfigDir)) { + // relativize the path + relativePath = userConfigDir.relativize(relativePath); + } // Use properties to persist the logo to use. - // Should use static variable instead - ModuleSettings.setConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP, path); + ModuleSettings.setConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP, relativePath.toString()); } @Override public String getReportTitle() { - String curTitle = null; - curTitle = ModuleSettings.getConfigSetting(MODULE_NAME, REPORT_TITLE_PROP); + String curTitle = ModuleSettings.getConfigSetting(MODULE_NAME, REPORT_TITLE_PROP); if (curTitle == null || curTitle.isEmpty()) { //use default logger.log(Level.INFO, "Using default report branding for report title"); //NON-NLS @@ -158,9 +196,8 @@ public final class ReportBranding implements ReportBrandingProviderI { @Override public String getReportFooter() { - String curFooter = null; - curFooter = ModuleSettings.getConfigSetting(MODULE_NAME, REPORT_FOOTER_PROP); + String curFooter = ModuleSettings.getConfigSetting(MODULE_NAME, REPORT_FOOTER_PROP); if (curFooter == null) { //use default logger.log(Level.INFO, "Using default report branding for report footer"); //NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/html/HTMLReport.java b/Core/src/org/sleuthkit/autopsy/report/modules/html/HTMLReport.java index d1008f110b..aa084d66f3 100644 --- a/Core/src/org/sleuthkit/autopsy/report/modules/html/HTMLReport.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/html/HTMLReport.java @@ -213,6 +213,7 @@ public class HTMLReport implements TableReportModule { * Copies a suitable icon for the given data type in the output directory * and returns the icon file name to use for the given data type. 
*/ + @SuppressWarnings( "deprecation" ) private String useDataTypeIcon(String dataType) { String iconFilePath; String iconFileName; diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java index 56bb8e9133..f8dbebe224 100644 --- a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java @@ -1085,7 +1085,7 @@ public class PortableCaseReportModule implements ReportModule { Host newHost = null; if (content instanceof DataSource) { Host oldHost = ((DataSource)content).getHost(); - newHost = portableSkCase.getHostManager().createHost(oldHost.getName()); + newHost = portableSkCase.getHostManager().newHost(oldHost.getName()); } CaseDbTransaction trans = portableSkCase.beginTransaction(); diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/stix/EvalAccountObj.java b/Core/src/org/sleuthkit/autopsy/report/modules/stix/EvalAccountObj.java index 5d58f10388..e52d28d943 100644 --- a/Core/src/org/sleuthkit/autopsy/report/modules/stix/EvalAccountObj.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/stix/EvalAccountObj.java @@ -37,14 +37,14 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; */ class EvalAccountObj extends EvaluatableObject { - private AccountObjectType obj; + private final AccountObjectType obj; - public EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) { + EvalAccountObj(AccountObjectType a_obj, String a_id, String a_spacing) { obj = a_obj; id = a_id; spacing = a_spacing; } - + @SuppressWarnings( "deprecation" ) @Override public synchronized ObservableResult evaluate() { @@ -103,7 +103,7 @@ class EvalAccountObj extends EvaluatableObject { // The assumption here is that there aren't going to be too many network shares, so we // can cycle through all of 
them. try { - List finalHits = new ArrayList(); + List finalHits = new ArrayList<>(); Case case1 = Case.getCurrentCaseThrows(); SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); diff --git a/Core/src/org/sleuthkit/autopsy/textextractors/TikaTextExtractor.java b/Core/src/org/sleuthkit/autopsy/textextractors/TikaTextExtractor.java index 07d1d39308..f2e7654908 100644 --- a/Core/src/org/sleuthkit/autopsy/textextractors/TikaTextExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/textextractors/TikaTextExtractor.java @@ -70,12 +70,13 @@ import org.sleuthkit.datamodel.ReadContentInputStream; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; -import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.util.ArrayList; import org.apache.tika.parser.pdf.PDFParserConfig.OCR_STRATEGY; import org.sleuthkit.autopsy.coreutils.ExecUtil.HybridTerminator; +import org.sleuthkit.datamodel.TskData; /** * Extracts text from Tika supported content. 
Protects against Tika parser hangs @@ -133,7 +134,7 @@ final class TikaTextExtractor implements TextExtractor { // Used to log to the tika file that is why it uses the java.util.logging.logger class instead of the Autopsy one private static final java.util.logging.Logger TIKA_LOGGER = java.util.logging.Logger.getLogger("Tika"); //NON-NLS private static final Logger AUTOPSY_LOGGER = Logger.getLogger(TikaTextExtractor.class.getName()); - + private static final int LIMITED_OCR_SIZE_MIN = 100 * 1024; private final ThreadFactory tikaThreadFactory = new ThreadFactoryBuilder().setNameFormat("tika-reader-%d").build(); private final ExecutorService executorService = Executors.newSingleThreadExecutor(tikaThreadFactory); @@ -143,6 +144,7 @@ final class TikaTextExtractor implements TextExtractor { private final Content content; private boolean tesseractOCREnabled; + private boolean limitedOCREnabled; private static final String TESSERACT_DIR_NAME = "Tesseract-OCR"; //NON-NLS private static final String TESSERACT_EXECUTABLE = "tesseract.exe"; //NON-NLS private static final File TESSERACT_PATH = locateTesseractExecutable(); @@ -158,7 +160,7 @@ final class TikaTextExtractor implements TextExtractor { .map(mt -> mt.getType() + "/" + mt.getSubtype()) .collect(Collectors.toList()); - public TikaTextExtractor(Content content) { + TikaTextExtractor(Content content) { this.content = content; } @@ -198,7 +200,7 @@ final class TikaTextExtractor implements TextExtractor { // Handle images seperately so the OCR task can be cancelled. // See JIRA-4519 for the need to have cancellation in the UI and ingest. 
- if (ocrEnabled() && mimeType.toLowerCase().startsWith("image/")) { + if (ocrEnabled() && mimeType.toLowerCase().startsWith("image/") && useOcrOnFile(file)) { InputStream imageOcrStream = performOCR(file); return new InputStreamReader(imageOcrStream, Charset.forName("UTF-8")); } @@ -219,7 +221,7 @@ final class TikaTextExtractor implements TextExtractor { officeParserConfig.setUseSAXDocxExtractor(true); parseContext.set(OfficeParserConfig.class, officeParserConfig); - if (ocrEnabled()) { + if (ocrEnabled() && useOcrOnFile(file)) { // Configure OCR for Tika if it chooses to run OCR // during extraction TesseractOCRConfig ocrConfig = new TesseractOCRConfig(); @@ -256,7 +258,7 @@ final class TikaTextExtractor implements TextExtractor { + "Tika returned empty reader for " + content); } pushbackReader.unread(read); - + //Save the metadata if it has not been fetched already. if (metadataMap == null) { metadataMap = new HashMap<>(); @@ -264,7 +266,7 @@ final class TikaTextExtractor implements TextExtractor { metadataMap.put(mtdtKey, metadata.get(mtdtKey)); } } - + return new ReaderCharSource(pushbackReader).openStream(); } catch (TimeoutException te) { final String msg = NbBundle.getMessage(this.getClass(), @@ -345,6 +347,22 @@ final class TikaTextExtractor implements TextExtractor { } } + /** + * Method to indicate if OCR should be performed on this image file. Checks + * to see if the limited OCR setting is enabled. If it is it will also check + * that one of the limiting factors is true. + * + * @param file The AbstractFile which OCR might be performed on. + * @param boolean The configuration setting which indicates if limited OCR + * is enabled in Keyword Search. + * + * @return True if limited OCR is not enabled or the image is greater than + * 100KB in size or the image is a derived file. 
+ */ + private boolean useOcrOnFile(AbstractFile file) { + return !limitedOCREnabled || file.getSize() > LIMITED_OCR_SIZE_MIN || file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED; + } + /** * Wraps the creation of a TikaReader into a Future so that it can be * cancelled. @@ -356,7 +374,7 @@ final class TikaTextExtractor implements TextExtractor { private final Metadata metadata; private final ParseContext parseContext; - public GetTikaReader(AutoDetectParser parser, InputStream stream, + GetTikaReader(AutoDetectParser parser, InputStream stream, Metadata metadata, ParseContext parseContext) { this.parser = parser; this.stream = stream; @@ -386,7 +404,7 @@ final class TikaTextExtractor implements TextExtractor { * * @throws FileNotFoundException */ - public CleanUpStream(File file) throws FileNotFoundException { + CleanUpStream(File file) throws FileNotFoundException { super(file); this.file = file; } @@ -442,7 +460,7 @@ final class TikaTextExtractor implements TextExtractor { if (metadataMap != null) { return ImmutableMap.copyOf(metadataMap); } - + try { metadataMap = new HashMap<>(); InputStream stream = new ReadContentInputStream(content); @@ -528,7 +546,7 @@ final class TikaTextExtractor implements TextExtractor { * @param context Instance containing config classes */ @Override - public void setExtractionSettings(Lookup context) { + public void setExtractionSettings(Lookup context) { if (context != null) { List terminators = new ArrayList<>(); ImageConfig configInstance = context.lookup(ImageConfig.class); @@ -536,11 +554,13 @@ final class TikaTextExtractor implements TextExtractor { if (Objects.nonNull(configInstance.getOCREnabled())) { this.tesseractOCREnabled = configInstance.getOCREnabled(); } - + if (Objects.nonNull(configInstance.getLimitedOCREnabled())) { + this.limitedOCREnabled = configInstance.getLimitedOCREnabled(); + } if (Objects.nonNull(configInstance.getOCRLanguages())) { this.languagePacks = 
formatLanguagePacks(configInstance.getOCRLanguages()); } - + terminators.add(configInstance.getOCRTimeoutTerminator()); } @@ -548,8 +568,8 @@ final class TikaTextExtractor implements TextExtractor { if (terminatorInstance != null) { terminators.add(terminatorInstance); } - - if(!terminators.isEmpty()) { + + if (!terminators.isEmpty()) { this.processTerminator = new HybridTerminator(terminators); } } @@ -572,4 +592,4 @@ final class TikaTextExtractor implements TextExtractor { return reader; } } -} \ No newline at end of file +} diff --git a/Core/src/org/sleuthkit/autopsy/textextractors/configs/ImageConfig.java b/Core/src/org/sleuthkit/autopsy/textextractors/configs/ImageConfig.java index a92852101c..6c501a7bdf 100755 --- a/Core/src/org/sleuthkit/autopsy/textextractors/configs/ImageConfig.java +++ b/Core/src/org/sleuthkit/autopsy/textextractors/configs/ImageConfig.java @@ -29,10 +29,11 @@ import org.sleuthkit.autopsy.coreutils.ExecUtil.TimedProcessTerminator; * @see org.openide.util.Lookup */ public class ImageConfig { - + private static final int OCR_TIMEOUT_SECONDS = 30 * 60; private Boolean OCREnabled; + private Boolean limitedOCREnabled; private List ocrLanguages; private final TimedProcessTerminator ocrTimedTerminator = new TimedProcessTerminator(OCR_TIMEOUT_SECONDS); @@ -46,6 +47,16 @@ public class ImageConfig { this.OCREnabled = enabled; } + /** + * Enables the limiting OCR to be run on larger images and images which were + * extracted from documents. + * + * @param enabled Flag indicating if OCR is enabled. + */ + public void setLimitedOCREnabled(boolean enabled) { + this.limitedOCREnabled = enabled; + } + /** * Gets the OCR flag that has been set. By default this flag is turned off. * @@ -57,9 +68,9 @@ public class ImageConfig { /** * Sets languages for OCR. - * + * * See PlatformUtil for list of installed language packs. 
- * + * * @param languages List of languages to use */ public void setOCRLanguages(List languages) { @@ -68,19 +79,30 @@ public class ImageConfig { /** * Gets the list of languages OCR should perform. - * + * * @return Collection of OCR languages */ public List getOCRLanguages() { return this.ocrLanguages; } - + /** * Returns a ProcessTerminator for timing out the OCR process. - * + * * @return ProcessTerminator instance. */ public ProcessTerminator getOCRTimeoutTerminator() { return ocrTimedTerminator; } + + /** + * Gets the limited OCR flag to indicate if OCR should be limited to larger + * images and images which were extracted from documents. + * + * @return Flag indicating if limited OCR is enabled. True if OCR should be + * limited, false otherwise.. + */ + public boolean getLimitedOCREnabled() { + return limitedOCREnabled; + } } diff --git a/Core/src/org/sleuthkit/autopsy/texttranslation/translators/BingTranslator.java b/Core/src/org/sleuthkit/autopsy/texttranslation/translators/BingTranslator.java index 948aae9217..6524c1835d 100644 --- a/Core/src/org/sleuthkit/autopsy/texttranslation/translators/BingTranslator.java +++ b/Core/src/org/sleuthkit/autopsy/texttranslation/translators/BingTranslator.java @@ -30,6 +30,7 @@ import java.io.IOException; import javax.swing.JPanel; import org.openide.util.NbBundle.Messages; import org.openide.util.lookup.ServiceProvider; +import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.texttranslation.TextTranslator; import org.sleuthkit.autopsy.texttranslation.TranslationConfigException; import org.sleuthkit.autopsy.texttranslation.TranslationException; @@ -46,7 +47,7 @@ public class BingTranslator implements TextTranslator { //https://docs.microsoft.com/en-us/azure/cognitive-services/translator/language-support private static final String BASE_URL = "https://api.cognitive.microsofttranslator.com/translate?api-version=3.0&to="; private static final int MAX_STRING_LENGTH = 5000; - private final 
BingTranslatorSettingsPanel settingsPanel; + private BingTranslatorSettingsPanel settingsPanel; private final BingTranslatorSettings settings = new BingTranslatorSettings(); // This sends messages to Microsoft. private final OkHttpClient CLIENT = new OkHttpClient(); @@ -55,11 +56,11 @@ public class BingTranslator implements TextTranslator { * Create a Bing Translator */ public BingTranslator() { - settingsPanel = new BingTranslatorSettingsPanel(settings.getAuthenticationKey(), settings.getTargetLanguageCode()); + } /** - * Get the tranlationurl for the specified language code + * Get the tranlation url for the specified language code * * * @@ -133,7 +134,11 @@ public class BingTranslator implements TextTranslator { } @Override + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) public JPanel getSettingsPanel() { + if(settingsPanel == null) { + settingsPanel = new BingTranslatorSettingsPanel(settings.getAuthenticationKey(), settings.getTargetLanguageCode()); + } return settingsPanel; } diff --git a/Core/src/org/sleuthkit/autopsy/texttranslation/translators/GoogleTranslator.java b/Core/src/org/sleuthkit/autopsy/texttranslation/translators/GoogleTranslator.java index ec74190229..c0820b688c 100644 --- a/Core/src/org/sleuthkit/autopsy/texttranslation/translators/GoogleTranslator.java +++ b/Core/src/org/sleuthkit/autopsy/texttranslation/translators/GoogleTranslator.java @@ -36,6 +36,7 @@ import org.apache.commons.lang3.StringUtils; import org.openide.util.NbBundle.Messages; import org.openide.util.lookup.ServiceProvider; import org.sleuthkit.autopsy.coreutils.EscapeUtil; +import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.texttranslation.TextTranslator; import org.sleuthkit.autopsy.texttranslation.TranslationConfigException; import org.sleuthkit.autopsy.texttranslation.TranslationException; @@ -50,7 +51,7 @@ public final class GoogleTranslator implements TextTranslator { private static final Logger logger = 
Logger.getLogger(GoogleTranslator.class.getName()); //See translate method for justification of this limit. private static final int MAX_PAYLOAD_SIZE = 5000; - private final GoogleTranslatorSettingsPanel settingsPanel; + private GoogleTranslatorSettingsPanel settingsPanel; private final GoogleTranslatorSettings settings = new GoogleTranslatorSettings(); private Translate googleTranslate; @@ -59,7 +60,6 @@ public final class GoogleTranslator implements TextTranslator { */ public GoogleTranslator() { // Instantiates a client - settingsPanel = new GoogleTranslatorSettingsPanel(settings.getCredentialPath(), settings.getTargetLanguageCode()); loadTranslator(); } @@ -134,7 +134,11 @@ public final class GoogleTranslator implements TextTranslator { } @Override + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) public JPanel getSettingsPanel() { + if(settingsPanel == null) { + settingsPanel = new GoogleTranslatorSettingsPanel(settings.getCredentialPath(), settings.getTargetLanguageCode()); + } return settingsPanel; } diff --git a/Core/src/org/sleuthkit/autopsy/threadutils/TaskRetryUtil.java b/Core/src/org/sleuthkit/autopsy/threadutils/TaskRetryUtil.java index e00cedab83..79484dddb1 100755 --- a/Core/src/org/sleuthkit/autopsy/threadutils/TaskRetryUtil.java +++ b/Core/src/org/sleuthkit/autopsy/threadutils/TaskRetryUtil.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -163,7 +163,7 @@ public class TaskRetryUtil { * each attempt and an optional timeout for each attempt. If an attempt * times out, that particular attempt task will be cancelled. * - * @tparam T The return type of the task. + * @tparam T The return type of the task. * @param task The task. * @param attempts The defining details for each attempt of the task. 
* @param executor The scheduled task executor to be used to attempt the @@ -181,6 +181,9 @@ public class TaskRetryUtil { * @throws InterruptedException */ public static T attemptTask(Callable task, List attempts, ScheduledThreadPoolExecutor executor, Terminator terminator, Logger logger, String taskDesc) throws InterruptedException { + /* + * Attempt the task. + */ T result = null; String taskDescForLog = taskDesc != null ? taskDesc : "Task"; int attemptCounter = 0; @@ -195,9 +198,6 @@ public class TaskRetryUtil { break; } TaskAttempt attempt = attempts.get(attemptCounter); - if (logger != null) { - logger.log(Level.INFO, String.format("SCHEDULING '%s' (attempt = %d, delay = %d %s, timeout = %d %s)", taskDescForLog, attemptCounter + 1, attempt.getDelay(), attempt.getTimeUnit(), attempt.getTimeout(), attempt.getTimeUnit())); - } if (attemptCounter > 0) { totalTaskRetries.incrementAndGet(); } @@ -222,11 +222,27 @@ public class TaskRetryUtil { } ++attemptCounter; } + + /* + * If the task required more than one attempt, log it. + */ + if (logger != null && attemptCounter > 1) { + if (result != null) { + logger.log(Level.WARNING, String.format("'%s' succeeded after %d attempts", taskDescForLog, attemptCounter)); + } else { + logger.log(Level.SEVERE, String.format("'%s' failed after %d attempts", taskDescForLog, attemptCounter)); + } + } + + /* + * If the task failed, count it as a failed task. 
+ */ if (result == null) { if (terminator == null || !terminator.stopTaskAttempts()) { totalFailedTasks.incrementAndGet(); } } + return result; } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java index f80eef206b..b85f43bab9 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/EventsModel.java @@ -55,6 +55,7 @@ import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent; import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState; +import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.SqlFilterState; import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl; import org.sleuthkit.autopsy.timeline.utils.FilterUtils; import org.sleuthkit.autopsy.timeline.zooming.EventsModelParams; @@ -222,8 +223,7 @@ public final class EventsModel { * @param rootFilterState A root filter state object. 
*/ synchronized void addDataSourceFilters(RootFilterState rootFilterState) { - DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter(); - datasourceIDsToNamesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFilter(entry))); + datasourceIDsToNamesMap.entrySet().forEach(entry -> rootFilterState.getDataSourcesFilterState().addSubFilterState(new SqlFilterState<>(newDataSourceFilter(entry)))); } /** diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java index 6d8e5da23c..1dff37d7b1 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java @@ -49,19 +49,19 @@ public class TimeLineModule { } /** - * Get instance of the controller for the current case + * Get instance of the controller for the current case. + * The controller instance is initialized from a case open event. * * @return the controller for the current case. * - * @throws NoCurrentCaseException If there is no case open. * @throws TskCoreException If there was a problem accessing the case * database. 
* */ - public static TimeLineController getController() throws NoCurrentCaseException, TskCoreException { + public static TimeLineController getController() throws TskCoreException { synchronized (controllerLock) { if (controller == null) { - controller = new TimeLineController(Case.getCurrentCaseThrows()); + throw new TskCoreException("Timeline controller not initialized"); } return controller; } @@ -100,13 +100,22 @@ public class TimeLineModule { } controller = null; } + } else { + // Case is opening - create the controller now + synchronized (controllerLock) { + try { + controller = new TimeLineController(Case.getCurrentCaseThrows()); + } catch (TskCoreException | NoCurrentCaseException ex) { + logger.log(Level.SEVERE, "Error creating Timeline controller", ex); + } + } } } else { try { getController().handleCaseEvent(evt); - } catch (NoCurrentCaseException ignored) { } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error handling application event", ex); + // The call to getController() will only fail due to case closing, so do + // not record the error. } } } @@ -121,12 +130,9 @@ public class TimeLineModule { public void propertyChange(PropertyChangeEvent evt) { try { getController().handleIngestModuleEvent(evt); - } catch (NoCurrentCaseException ex) { - // ignore - return; } catch (TskCoreException ex) { - MessageNotifyUtil.Message.error("Error creating timeline controller."); - logger.log(Level.SEVERE, "Error creating timeline controller", ex); + // The call to getController() will only fail due to case closing, so do + // not record the error. 
} } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java index 39506554ac..a834ccd0f6 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/ViewFrame.java @@ -588,7 +588,7 @@ final public class ViewFrame extends BorderPane { long startMillis = filteredEvents.getTimeRange().getStartMillis(); long endMillis = filteredEvents.getTimeRange().getEndMillis(); - if (minTime > 0 && maxTime > minTime) { + if ( maxTime > minTime) { Platform.runLater(() -> { startPicker.localDateTimeProperty().removeListener(startListener); endPicker.localDateTimeProperty().removeListener(endListener); diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java index 6884d275a8..f7e628f698 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/filtering/datamodel/CompoundFilterState.java @@ -110,7 +110,7 @@ public class CompoundFilterState newSubFilterState) { + public void addSubFilterState(FilterState< ? 
extends SubFilterType> newSubFilterState) { SubFilterType filter = newSubFilterState.getFilter(); if (getSubFilterStates().stream().map(FilterState::getFilter).noneMatch(filter::equals)) { diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListTimeline.fxml b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListTimeline.fxml index de86817f00..7a2e5168b3 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListTimeline.fxml +++ b/Core/src/org/sleuthkit/autopsy/timeline/ui/listvew/ListTimeline.fxml @@ -75,15 +75,15 @@ - + - + - + diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/AddEditCategoryDialog.form b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/AddEditCategoryDialog.form new file mode 100644 index 0000000000..a863b48212 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/AddEditCategoryDialog.form @@ -0,0 +1,138 @@ + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.url.analytics.domaincategorization;

import java.util.Set;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.url.analytics.DomainCategory;

/**
 * Dialog for adding or editing a custom domain suffix category. The dialog
 * validates input as the user types and only enables Save when both the
 * domain suffix and the category pass validation.
 */
@Messages({
    "AddEditCategoryDialog_Edit=Edit Entry",
    "AddEditCategoryDialog_Add=Add Entry"
})
class AddEditCategoryDialog extends javax.swing.JDialog {

    // True only if the user pressed Save (see isChanged()).
    private boolean changed = false;
    // Suffixes already present, used to reject duplicates on add.
    private final Set<String> currentSuffixes;
    // Entry being edited, or null when adding a new entry.
    private final DomainCategory currentDomainCategory;

    // Re-validates the dialog whenever either text field's document changes.
    private final DocumentListener updateListener = new DocumentListener() {
        @Override
        public void insertUpdate(DocumentEvent e) {
            onValueUpdate(domainSuffixTextField.getText(), categoryTextField.getText());
        }

        @Override
        public void removeUpdate(DocumentEvent e) {
            onValueUpdate(domainSuffixTextField.getText(), categoryTextField.getText());
        }

        @Override
        public void changedUpdate(DocumentEvent e) {
            onValueUpdate(domainSuffixTextField.getText(), categoryTextField.getText());
        }
    };

    /**
     * Main constructor if adding a new domain suffix.
     *
     * @param parent          The parent frame for this dialog.
     * @param currentSuffixes The current domain suffixes.
     */
    AddEditCategoryDialog(java.awt.Frame parent, Set<String> currentSuffixes) {
        this(parent, currentSuffixes, null);
    }

    /**
     * Main constructor if editing a domain suffix.
     *
     * @param parent                The parent frame for this dialog.
     * @param currentSuffixes       The current domain suffixes.
     * @param currentDomainCategory The domain category being edited. If null,
     *                              it will be assumed that a new domain suffix
     *                              is being added.
     */
    AddEditCategoryDialog(java.awt.Frame parent, Set<String> currentSuffixes, DomainCategory currentDomainCategory) {
        super(parent, true);
        initComponents();
        this.currentSuffixes = currentSuffixes;
        this.currentDomainCategory = currentDomainCategory;

        // set title based on whether or not we are editing or adding;
        // also don't allow editing of the domain suffix when editing an
        // existing entry (the suffix is the primary key).
        if (currentDomainCategory == null) {
            setTitle(Bundle.AddEditCategoryDialog_Add());
            domainSuffixTextField.setEditable(true);
            domainSuffixTextField.setEnabled(true);
            onValueUpdate(null, null);
        } else {
            setTitle(Bundle.AddEditCategoryDialog_Edit());
            domainSuffixTextField.setEditable(false);
            domainSuffixTextField.setEnabled(false);
            onValueUpdate(currentDomainCategory.getHostSuffix(), currentDomainCategory.getCategory());
        }

        validationLabel.setText("");

        categoryTextField.getDocument().addDocumentListener(updateListener);
        domainSuffixTextField.getDocument().addDocumentListener(updateListener);
    }

    /**
     * Returns the domain suffix / category pair currently in the input fields.
     * Only meaningful when isChanged() returns true.
     *
     * @return The domain category assembled from the current input values.
     */
    DomainCategory getValue() {
        return new DomainCategory(domainSuffixTextField.getText(), categoryTextField.getText());
    }

    /**
     * Returns whether or not the value has been changed and saved by the user.
     *
     * @return Whether or not the value has been changed and saved by the user.
     */
    boolean isChanged() {
        return changed;
    }

    /**
     * When the text field is updated, this method is called to validate the
     * current input and enable/disable the Save button accordingly.
     *
     * @param suffixStr   The current domain suffix string in the input.
     * @param categoryStr The current category string in the input.
     */
    @Messages({
        "# {0} - maxSuffixLen",
        "AddEditCategoryDialog_onValueUpdate_badSuffix=Please provide a domain suffix that is no more than {0} characters that includes at least one period.",
        "# {0} - maxCategoryLen",
        "AddEditCategoryDialog_onValueUpdate_badCategory=Please provide a category that is no more than {0} characters.",
        "AddEditCategoryDialog_onValueUpdate_suffixRepeat=Please provide a unique domain suffix.",
        "AddEditCategoryDialog_onValueUpdate_sameCategory=Please provide a new category for this domain suffix.",})
    void onValueUpdate(String suffixStr, String categoryStr) {

        String safeSuffixStr = suffixStr == null ? "" : suffixStr;
        String normalizedSuffix = WebCategoriesDataModel.getNormalizedSuffix(safeSuffixStr);
        String safeCategoryStr = categoryStr == null ? "" : categoryStr;
        String normalizedCategory = WebCategoriesDataModel.getNormalizedCategory(safeCategoryStr);

        // update input text field if it is not the same.
        if (!safeCategoryStr.equals(categoryTextField.getText())) {
            categoryTextField.setText(safeCategoryStr);
        }

        if (!safeSuffixStr.equals(domainSuffixTextField.getText())) {
            domainSuffixTextField.setText(safeSuffixStr);
        }

        String validationMessage = null;
        if (normalizedSuffix.length() == 0
                || normalizedSuffix.length() > WebCategoriesDataModel.getMaxDomainSuffixLength()
                || normalizedSuffix.indexOf('.') < 0) {

            // BUGFIX: the suffix message must report the suffix limit, not the
            // category limit (previously passed getMaxCategoryLength()).
            validationMessage = Bundle.AddEditCategoryDialog_onValueUpdate_badSuffix(WebCategoriesDataModel.getMaxDomainSuffixLength());

        } else if (normalizedCategory.length() == 0 || normalizedCategory.length() > WebCategoriesDataModel.getMaxCategoryLength()) {
            validationMessage = Bundle.AddEditCategoryDialog_onValueUpdate_badCategory(WebCategoriesDataModel.getMaxCategoryLength());

        } else if (currentSuffixes.contains(normalizedSuffix)
                && (currentDomainCategory == null
                || !normalizedSuffix.equals(currentDomainCategory.getHostSuffix()))) {

            validationMessage = Bundle.AddEditCategoryDialog_onValueUpdate_suffixRepeat();

        } else if (currentDomainCategory != null
                && currentDomainCategory.getCategory() != null
                && normalizedCategory.equals(WebCategoriesDataModel.getNormalizedCategory(currentDomainCategory.getCategory()))) {

            validationMessage = Bundle.AddEditCategoryDialog_onValueUpdate_sameCategory();
        }

        saveButton.setEnabled(validationMessage == null);
        // html wrapper lets long validation messages word-wrap in the JLabel
        validationLabel.setText(validationMessage == null ? "" : String.format("<html>%s</html>", validationMessage));
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        categoryTextField = new javax.swing.JTextField();
        domainSuffixTextField = new javax.swing.JTextField();
        javax.swing.JLabel categoryLabel = new javax.swing.JLabel();
        javax.swing.JLabel domainSuffixLabel = new javax.swing.JLabel();
        validationLabel = new javax.swing.JLabel();
        javax.swing.JButton cancelButton = new javax.swing.JButton();
        saveButton = new javax.swing.JButton();

        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);

        categoryLabel.setText(org.openide.util.NbBundle.getMessage(AddEditCategoryDialog.class, "AddEditCategoryDialog.categoryLabel.text")); // NOI18N

        domainSuffixLabel.setText(org.openide.util.NbBundle.getMessage(AddEditCategoryDialog.class, "AddEditCategoryDialog.domainSuffixLabel.text")); // NOI18N

        validationLabel.setForeground(java.awt.Color.RED);
        validationLabel.setText(" ");
        validationLabel.setToolTipText("");

        cancelButton.setText(org.openide.util.NbBundle.getMessage(AddEditCategoryDialog.class, "AddEditCategoryDialog.cancelButton.text")); // NOI18N
        cancelButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                cancelButtonActionPerformed(evt);
            }
        });

        saveButton.setText(org.openide.util.NbBundle.getMessage(AddEditCategoryDialog.class, "AddEditCategoryDialog.saveButton.text")); // NOI18N
        saveButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                saveButtonActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                        .addGap(0, 0, Short.MAX_VALUE)
                        .addComponent(saveButton)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(cancelButton))
                    .addComponent(validationLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(domainSuffixLabel)
                            .addComponent(categoryLabel))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(categoryTextField, javax.swing.GroupLayout.DEFAULT_SIZE, 276, Short.MAX_VALUE)
                            .addComponent(domainSuffixTextField))))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(domainSuffixTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(domainSuffixLabel))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(categoryTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(categoryLabel))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(validationLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 46, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(cancelButton)
                    .addComponent(saveButton))
                .addContainerGap(8, Short.MAX_VALUE))
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void saveButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_saveButtonActionPerformed
        this.changed = true;
        dispose();
    }//GEN-LAST:event_saveButtonActionPerformed

    private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelButtonActionPerformed
        this.changed = false;
        dispose();
    }//GEN-LAST:event_cancelButtonActionPerformed


    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JTextField categoryTextField;
    private javax.swing.JTextField domainSuffixTextField;
    private javax.swing.JButton saveButton;
    private javax.swing.JLabel validationLabel;
    // End of variables declaration//GEN-END:variables
}
+WebCategoryOptionsController_title=Custom Web Categories +WebCategoryOptionsController_keywords=Custom Web Categories +AddEditCategoryDialog.categoryLabel.text=Category: +AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix: +AddEditCategoryDialog.saveButton.text=Save +AddEditCategoryDialog.cancelButton.text=Cancel +WebCategoriesOptionsPanel.panelDescription.text=This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes. +WebCategoriesOptionsPanel.categoriesTitle.text=Categories: +WebCategoriesOptionsPanel.newEntryButton.text=New Entry +WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry +WebCategoriesOptionsPanel.deleteEntryButton.text=Delete Entry +WebCategoriesOptionsPanel.importSetButton.text=Import Set +WebCategoriesOptionsPanel.exportSetButton.text=Export Set +WebCategoriesOptionsPanel.ingestRunningWarning.text=Ingest is currently running. No editing can take place at this time. diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties-MERGED new file mode 100644 index 0000000000..f08c1c2986 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/Bundle.properties-MERGED @@ -0,0 +1,37 @@ +AddEditCategoryDialog_Add=Add Entry +AddEditCategoryDialog_Edit=Edit Entry +# {0} - maxCategoryLen +AddEditCategoryDialog_onValueUpdate_badCategory=Please provide a category that is no more than {0} characters. +# {0} - maxSuffixLen +AddEditCategoryDialog_onValueUpdate_badSuffix=Please provide a domain suffix that is no more than {0} characters that includes at least one period. +AddEditCategoryDialog_onValueUpdate_sameCategory=Please provide a new category for this domain suffix. +AddEditCategoryDialog_onValueUpdate_suffixRepeat=Please provide a unique domain suffix. 
+WebCategoriesOptionsPanel_categoryTable_categoryColumnName=Category +WebCategoriesOptionsPanel_categoryTable_suffixColumnName=Domain Suffix +WebCategoriesOptionsPanel_exportSetButtonActionPerformed_defaultFileName=Custom Categories Export +WebCategoriesOptionsPanel_exportSetButtonActionPerformed_duplicateMessage=A file already exists at the selected path. The categories will not be exported. +WebCategoriesOptionsPanel_exportSetButtonActionPerformed_duplicateTitle=File Already Exists +WebCategoriesOptionsPanel_exportSetButtonActionPerformed_errorMessage=There was an error exporting. +WebCategoriesOptionsPanel_exportSetButtonActionPerformed_errorTitle=Export Error +WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorMessage=There was an error importing this json file. +WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorTitle=Import Error +WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictCancel=Cancel +# {0} - domainSuffix +WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictMessage=Domain suffix {0} already exists. What would you like to do? +WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictOverwrite=Overwrite +WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictSkip=Skip +WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictTitle=Domain Suffix Already Exists +WebCategoryOptionsController_title=Custom Web Categories +WebCategoryOptionsController_keywords=Custom Web Categories +AddEditCategoryDialog.categoryLabel.text=Category: +AddEditCategoryDialog.domainSuffixLabel.text=Domain Suffix: +AddEditCategoryDialog.saveButton.text=Save +AddEditCategoryDialog.cancelButton.text=Cancel +WebCategoriesOptionsPanel.panelDescription.text=This feature allows the Recent Activity ingest module to perform custom categorization of web sites based on domain suffixes. 
+WebCategoriesOptionsPanel.categoriesTitle.text=Categories: +WebCategoriesOptionsPanel.newEntryButton.text=New Entry +WebCategoriesOptionsPanel.editEntryButton.text=Edit Entry +WebCategoriesOptionsPanel.deleteEntryButton.text=Delete Entry +WebCategoriesOptionsPanel.importSetButton.text=Import Set +WebCategoriesOptionsPanel.exportSetButton.text=Export Set +WebCategoriesOptionsPanel.ingestRunningWarning.text=Ingest is currently running. No editing can take place at this time. diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/CustomWebCategorizer.java b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/CustomWebCategorizer.java new file mode 100644 index 0000000000..26acd97f3a --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/CustomWebCategorizer.java @@ -0,0 +1,93 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.url.analytics.domaincategorization;

import java.sql.SQLException;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.url.analytics.DomainCategorizer;
import org.sleuthkit.autopsy.url.analytics.DomainCategorizerException;
import org.sleuthkit.autopsy.url.analytics.DomainCategory;

/**
 * A DomainCategoryProvider for custom web categories. NOTE: If this class
 * package or name change, code in DomainCategoryRunner will also need to change
 * to reflect the changing class name for ordering purposes.
 */
@ServiceProvider(service = DomainCategorizer.class)
public class CustomWebCategorizer implements DomainCategorizer {

    private static final Logger logger = Logger.getLogger(CustomWebCategorizer.class.getName());

    // Backing store of custom suffix -> category mappings.
    private final WebCategoriesDataModel dataModel;

    /**
     * Constructor accepting a custom WebCategoriesDataModel.
     *
     * @param dataModel The WebCategoriesDataModel to use as a data model.
     */
    CustomWebCategorizer(WebCategoriesDataModel dataModel) {
        this.dataModel = dataModel;
    }

    /**
     * No parameter constructor that uses the singleton instance of the
     * WebCategoriesDataModel.
     */
    public CustomWebCategorizer() {
        this(WebCategoriesDataModel.getInstance());
    }

    @Override
    public DomainCategory getCategory(String domain, String host) throws DomainCategorizerException {
        // Nothing to look up until the data model has a database connection.
        if (!dataModel.isInitialized()) {
            return null;
        }

        // Prefer the host; fall back to the domain when the host is blank.
        String candidate = StringUtils.isBlank(host) ? domain : host;
        if (StringUtils.isBlank(candidate)) {
            return null;
        }

        String lookupKey = candidate.toLowerCase();
        try {
            return dataModel.getMatchingRecord(lookupKey);
        } catch (SQLException ex) {
            // Swallow lookup failures so a broken custom DB does not stop
            // other categorizers from running; just log and return no match.
            logger.log(Level.WARNING, "There was an error while retrieving data for: " + lookupKey, ex);
            return null;
        }
    }

    @Override
    public void initialize() throws DomainCategorizerException {
        try {
            dataModel.initialize();
        } catch (SQLException ex) {
            throw new DomainCategorizerException("Unable to initialize.", ex);
        }
    }

    @Override
    public void close() throws SQLException {
        dataModel.close();
    }
}
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.url.analytics.domaincategorization;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.url.analytics.DomainCategory;

/**
 * Provides the data model for exporting, importing and CRUD operations on
 * custom web categories, backed by a per-user sqlite database.
 */
class WebCategoriesDataModel implements AutoCloseable {

    /**
     * DTO to be used with jackson when converting to and from exported content.
     */
    @JsonIgnoreProperties(ignoreUnknown = true)
    static class CustomCategorizationJsonDto {

        private final String category;
        private final List<String> domains;

        /**
         * Main constructor.
         *
         * @param category The category.
         * @param domains  The list of host suffixes in this category.
         */
        @JsonCreator
        CustomCategorizationJsonDto(
                @JsonProperty("category") String category,
                @JsonProperty("domains") List<String> domains) {
            this.category = category;
            // defensive copy; tolerate a missing "domains" array in the json
            this.domains = domains == null
                    ? Collections.emptyList()
                    : new ArrayList<>(domains);
        }

        /**
         * Returns the category.
         *
         * @return The category.
         */
        @JsonGetter("category")
        String getCategory() {
            return category;
        }

        /**
         * Returns the list of domain suffixes in this category.
         *
         * @return The list of domain suffixes in this category.
         */
        @JsonGetter("domains")
        List<String> getDomains() {
            return domains;
        }
    }

    private static final int MAX_CAT_SIZE = 300;
    private static final int MAX_DOMAIN_SIZE = 255;

    private static final String ROOT_FOLDER = "DomainCategorization";
    private static final String FILE_REL_PATH = "custom_list.db";
    private static final String JDBC_SQLITE_PREFIX = "jdbc:sqlite:";
    private static final String TABLE_NAME = "domain_suffix";
    private static final String SUFFIX_COLUMN = "suffix";
    private static final String CATEGORY_COLUMN = "category";

    private static final Logger logger = Logger.getLogger(WebCategoriesDataModel.class.getName());
    private static WebCategoriesDataModel instance;

    /**
     * Returns the maximum string length of a domain suffix.
     *
     * @return The maximum string length of a domain suffix.
     */
    static int getMaxDomainSuffixLength() {
        return MAX_DOMAIN_SIZE;
    }

    /**
     * Returns the maximum string length of a category.
     *
     * @return The maximum string length of a category.
     */
    static int getMaxCategoryLength() {
        // BUGFIX: previously returned MAX_DOMAIN_SIZE (255), but categories
        // are normalized and stored against MAX_CAT_SIZE (300) — see
        // getNormalizedCategory() and the CATEGORY_COLUMN VARCHAR size.
        return MAX_CAT_SIZE;
    }

    /**
     * Retrieves the default path for where custom domain categorization exists.
     *
     * @return The path or null if the path cannot be reconciled.
     */
    private static File getDefaultPath() {
        String configDir = PlatformUtil.getUserConfigDirectory();
        if (configDir == null || !new File(configDir).exists()) {
            logger.log(Level.WARNING, "Unable to find UserConfigDirectory");
            return null;
        }

        Path subDirPath = Paths.get(configDir, ROOT_FOLDER);
        File subDir = subDirPath.toFile();
        if (!subDir.exists() && !subDir.mkdirs()) {
            logger.log(Level.WARNING, "There was an issue creating custom domain config at: {0}", subDirPath.toString());
        }

        return Paths.get(configDir, ROOT_FOLDER, FILE_REL_PATH).toFile();
    }

    /**
     * Generates the normalized category string to be inserted into the
     * database: trimmed and truncated to the maximum category length.
     *
     * @param category The category.
     *
     * @return The normalized string (empty for null input).
     */
    static String getNormalizedCategory(String category) {
        if (category == null) {
            return "";
        }

        String trimmedCategory = category.trim();

        return trimmedCategory.substring(0, Math.min(trimmedCategory.length(), MAX_CAT_SIZE));
    }

    /**
     * Generates the normalized domain suffix string to be inserted into the
     * database: each dot-separated token restricted to alphanumerics and
     * interior hyphens, empty tokens dropped, lower-cased and truncated.
     *
     * @param domainSuffix The domain suffix.
     *
     * @return The normalized string (empty for null input).
     */
    static String getNormalizedSuffix(String domainSuffix) {
        if (domainSuffix == null) {
            return "";
        }

        String sanitized = Stream.of(domainSuffix.split("\\."))
                .map(s -> {
                    return s
                            // alphanumeric and hyphen
                            .replaceAll("[^0-9a-zA-Z\\-]", "")
                            // no leading or trailing hyphen
                            .replaceAll("^\\-*(.+?)?\\-*$", "$1");
                })
                .filter(StringUtils::isNotEmpty)
                .collect(Collectors.joining("."));

        return sanitized.substring(0, Math.min(sanitized.length(), MAX_DOMAIN_SIZE)).toLowerCase();
    }

    /**
     * Retrieves a singleton instance of this class.
     *
     * @return The singleton instance of this class.
     */
    static synchronized WebCategoriesDataModel getInstance() {
        // synchronized: reachable from both the options panel (EDT) and
        // ingest/categorizer threads, so guard the lazy initialization.
        if (instance == null) {
            instance = new WebCategoriesDataModel();
        }

        return instance;
    }

    // Location of the sqlite database file.
    private final File sqlitePath;
    // Open connection, or null when not initialized; guarded by 'this'.
    private Connection dbConn = null;

    /**
     * Constructor used to create singleton instance.
     */
    private WebCategoriesDataModel() {
        this(getDefaultPath());
    }

    /**
     * Constructor that accepts a variable path for the custom sqlite database
     * for custom domain categories.
     *
     * @param sqlitePath The path.
     */
    WebCategoriesDataModel(File sqlitePath) {
        this.sqlitePath = sqlitePath;
    }

    /**
     * Creates a sqlite jdbc connection and ensures the backing table exists.
     * Any previously open connection is closed first.
     *
     * @throws SQLException
     */
    synchronized void initialize() throws SQLException {
        String url = JDBC_SQLITE_PREFIX + sqlitePath.getAbsolutePath();
        if (this.dbConn != null) {
            this.dbConn.close();
            this.dbConn = null;
        }

        this.dbConn = DriverManager.getConnection(url);

        // speed up writes by disabling the rollback journal; acceptable here
        // since the data is a user-editable list that can be re-imported
        try (Statement turnOffWal = dbConn.createStatement()) {
            turnOffWal.execute("PRAGMA journal_mode=OFF");
        }

        // create table if it doesn't exist
        try (Statement createDomainsTable = dbConn.createStatement()) {
            createDomainsTable.execute(
                    "    CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (\n"
                    + "        " + SUFFIX_COLUMN + " VARCHAR(" + MAX_DOMAIN_SIZE + ") PRIMARY KEY,\n"
                    + "        " + CATEGORY_COLUMN + " VARCHAR(" + MAX_CAT_SIZE + ")\n"
                    + "    ) WITHOUT ROWID");
        }
    }

    /**
     * Returns true if initialized.
     *
     * @return True if initialized.
     */
    synchronized boolean isInitialized() {
        return this.dbConn != null;
    }

    /**
     * Retrieves all domain categories present in json file.
     *
     * @param jsonInput The json file.
     *
     * @return The domain categories (normalized; blank suffixes dropped).
     *
     * @throws IOException
     */
    List<DomainCategory> getJsonEntries(File jsonInput) throws IOException {
        if (jsonInput == null) {
            logger.log(Level.WARNING, "No valid file provided.");
            return Collections.emptyList();
        }

        ObjectMapper mapper = new ObjectMapper();
        List<CustomCategorizationJsonDto> customCategorizations
                = mapper.readValue(jsonInput, new TypeReference<List<CustomCategorizationJsonDto>>() {
                });

        Stream<CustomCategorizationJsonDto> categoryStream
                = (customCategorizations != null) ? customCategorizations.stream() : Stream.empty();

        return categoryStream
                .filter(c -> c != null && c.getCategory() != null && c.getDomains() != null)
                .flatMap(c -> c.getDomains().stream()
                .map(WebCategoriesDataModel::getNormalizedSuffix)
                .filter(StringUtils::isNotBlank)
                .map(d -> new DomainCategory(d, getNormalizedCategory(c.getCategory()))))
                .collect(Collectors.toList());

    }

    /**
     * Exports current database to a json file.
     *
     * @param jsonOutput The output file.
     *
     * @throws SQLException
     * @throws IOException
     */
    synchronized void exportToJson(File jsonOutput) throws SQLException, IOException {
        if (jsonOutput == null) {
            logger.log(Level.WARNING, "Null file provided.");
            return;
        }

        if (!isInitialized()) {
            initialize();
        }

        // retrieve items from the database
        List<Pair<String, String>> categoryDomains = new ArrayList<>();
        try (Statement domainSelect = dbConn.createStatement();
                ResultSet resultSet = domainSelect.executeQuery(
                        "SELECT " + SUFFIX_COLUMN + ", " + CATEGORY_COLUMN + " FROM " + TABLE_NAME + " ORDER BY " + SUFFIX_COLUMN)) {

            while (resultSet.next()) {
                categoryDomains.add(Pair.of(resultSet.getString(CATEGORY_COLUMN), resultSet.getString(SUFFIX_COLUMN)));
            }
        }

        // aggregate (category, suffix) pairs into one dto per category
        List<CustomCategorizationJsonDto> categories
                = categoryDomains.stream()
                        .collect(Collectors.toMap(
                                p -> p.getKey(),
                                p -> new ArrayList<>(Arrays.asList(p.getValue())),
                                (p1, p2) -> {
                                    p1.addAll(p2);
                                    return p1;
                                }
                        ))
                        .entrySet().stream()
                        .map(entry -> new CustomCategorizationJsonDto(entry.getKey(), entry.getValue()))
                        .collect(Collectors.toList());

        // write to disk
        ObjectMapper mapper = new ObjectMapper();
        mapper.writerWithDefaultPrettyPrinter().writeValue(jsonOutput, categories);
    }

    /**
     * Delete a record from the database.
     *
     * @param domainSuffix The domain suffix of the item to delete.
     *
     * @return Whether or not the operation actually deleted something.
     *
     * @throws SQLException
     * @throws IllegalArgumentException If a blank suffix is provided.
     */
    synchronized boolean deleteRecord(String domainSuffix) throws SQLException, IllegalArgumentException {
        if (StringUtils.isBlank(domainSuffix)) {
            throw new IllegalArgumentException("Expected non-empty domain suffix");
        }

        if (!isInitialized()) {
            initialize();
        }

        try (PreparedStatement suffixDelete = dbConn.prepareStatement(
                "DELETE FROM " + TABLE_NAME + " WHERE LOWER(" + SUFFIX_COLUMN + ") = LOWER(?)", Statement.RETURN_GENERATED_KEYS);) {

            suffixDelete.setString(1, getNormalizedSuffix(domainSuffix));
            return suffixDelete.executeUpdate() > 0;
        }
    }

    /**
     * Inserts or updates the entry for the given domain suffix.
     *
     * @param entry The domain suffix and category.
     *
     * @return True if successfully inserted/updated.
     *
     * @throws SQLException
     * @throws IllegalStateException
     * @throws IllegalArgumentException If the entry, its category, or its
     *                                  suffix normalize to blank.
     */
    synchronized boolean insertUpdateSuffix(DomainCategory entry) throws SQLException, IllegalStateException, IllegalArgumentException {
        if (entry == null || StringUtils.isBlank(getNormalizedCategory(entry.getCategory())) || StringUtils.isBlank(getNormalizedSuffix(entry.getHostSuffix()))) {
            throw new IllegalArgumentException("Expected non-empty, valid category and domain suffix.");
        }

        if (!isInitialized()) {
            initialize();
        }

        try (PreparedStatement insertUpdate = dbConn.prepareStatement(
                "INSERT OR REPLACE INTO " + TABLE_NAME + "(" + SUFFIX_COLUMN + ", " + CATEGORY_COLUMN + ") VALUES (?, ?)",
                Statement.RETURN_GENERATED_KEYS)) {

            insertUpdate.setString(1, getNormalizedSuffix(entry.getHostSuffix()));
            insertUpdate.setString(2, getNormalizedCategory(entry.getCategory()));
            return insertUpdate.executeUpdate() > 0;
        }
    }

    /**
     * Return all records in the database.
     *
     * @return The list of domain suffixes and their categories.
     *
     * @throws SQLException
     */
    synchronized List<DomainCategory> getRecords() throws SQLException {
        if (!isInitialized()) {
            initialize();
        }

        List<DomainCategory> entries = new ArrayList<>();

        try (Statement domainSelect = dbConn.createStatement();
                ResultSet resultSet = domainSelect.executeQuery(
                        "SELECT " + SUFFIX_COLUMN + ", " + CATEGORY_COLUMN + " FROM " + TABLE_NAME + " ORDER BY " + SUFFIX_COLUMN)) {

            while (resultSet.next()) {
                entries.add(new DomainCategory(
                        resultSet.getString(SUFFIX_COLUMN),
                        resultSet.getString(CATEGORY_COLUMN)));
            }
        }
        return entries;

    }

    private static final String GET_DOMAIN_SUFFIX_QUERY
            = "SELECT " + SUFFIX_COLUMN + ", " + CATEGORY_COLUMN
            + " FROM " + TABLE_NAME + " WHERE " + SUFFIX_COLUMN + " = ?";

    /**
     * Return the matching domain suffix or null if none found. NOTE(review):
     * the suffix is matched exactly as given; callers appear expected to pass
     * an already-normalized suffix — confirm at call sites.
     *
     * @param domainSuffix The domain suffix.
     *
     * @return The found entry or null.
     *
     * @throws SQLException
     */
    synchronized DomainCategory getRecordBySuffix(String domainSuffix) throws SQLException {
        if (!isInitialized()) {
            initialize();
        }

        try (PreparedStatement domainSelect = dbConn.prepareStatement(GET_DOMAIN_SUFFIX_QUERY)) {
            domainSelect.setString(1, domainSuffix);

            try (ResultSet resultSet = domainSelect.executeQuery()) {
                if (resultSet.next()) {
                    return new DomainCategory(
                            resultSet.getString(SUFFIX_COLUMN),
                            resultSet.getString(CATEGORY_COLUMN));
                } else {
                    return null;
                }
            }
        }
    }

    // get the suffix and category from the main table and gets the longest matching suffix.
    private static final String BASE_QUERY_FMT_STR
            = "SELECT " + SUFFIX_COLUMN + ", " + CATEGORY_COLUMN + " FROM " + TABLE_NAME
            + " WHERE suffix IN (%s) ORDER BY LENGTH(" + SUFFIX_COLUMN + ") DESC LIMIT 1";

    /**
     * Retrieves the longest matching domain suffix and category matching the
     * list of suffixes or null if no item can be found.
     *
     * @param suffixes The list of suffixes.
     *
     * @return The longest matching entry or null if no entry found.
     *
     * @throws SQLException
     */
    synchronized DomainCategory getLongestSuffixRecord(List<String> suffixes) throws SQLException {
        if (suffixes == null) {
            return null;
        }

        if (!isInitialized()) {
            initialize();
        }

        // one '?' placeholder per suffix; values are bound, never concatenated
        String questionMarks = IntStream.range(0, suffixes.size())
                .mapToObj((num) -> "?")
                .collect(Collectors.joining(","));

        try (PreparedStatement stmt = dbConn.prepareStatement(String.format(BASE_QUERY_FMT_STR, questionMarks))) {
            for (int i = 0; i < suffixes.size(); i++) {
                stmt.setString(i + 1, suffixes.get(i));
            }

            try (ResultSet resultSet = stmt.executeQuery()) {
                if (resultSet.next()) {
                    String suffix = resultSet.getString(SUFFIX_COLUMN);
                    String category = resultSet.getString(CATEGORY_COLUMN);
                    return new DomainCategory(suffix, category);
                }
            }
        }

        return null;
    }

    /**
     * Retrieves the longest matching domain suffix and category matching the
     * host or null if no item can be found.
     *
     * @param host The host name.
     *
     * @return The longest matching entry or null if no entry found.
     *
     * @throws SQLException
     */
    DomainCategory getMatchingRecord(String host) throws SQLException {
        return getLongestSuffixRecord(getSuffixes(host));
    }

    /**
     * Retrieves all the possible suffixes that could be tracked. For instance,
     * if the host was "chatenabled.mail.google.com", the list should be
     * ["chatenabled.mail.google.com", "mail.google.com", "google.com", "com"].
     *
     * @param host The host.
     *
     * @return The possible suffixes, or null for a null host.
     */
    private List<String> getSuffixes(String host) {
        if (host == null) {
            return null;
        }

        List<String> hostTokens = Arrays.asList(host.split("\\."));
        List<String> hostSegmentations = new ArrayList<>();

        for (int i = 0; i < hostTokens.size(); i++) {
            String searchString = String.join(".", hostTokens.subList(i, hostTokens.size()));
            hostSegmentations.add(searchString);
        }

        return hostSegmentations;
    }

    @Override
    public synchronized void close() throws SQLException {
        if (dbConn != null) {
            dbConn.close();
            dbConn = null;
        }
    }
}
+ */ +package org.sleuthkit.autopsy.url.analytics.domaincategorization; + +import java.beans.PropertyChangeListener; +import javax.swing.JComponent; +import org.netbeans.spi.options.OptionsPanelController; +import org.openide.util.HelpCtx; +import org.openide.util.Lookup; + +/** + * The options panel controller that registers and displays the option panel for + * custom web categories. + */ +@OptionsPanelController.TopLevelRegistration(categoryName = "#WebCategoryOptionsController_title", + iconBase = "org/sleuthkit/autopsy/images/domain-32.png", + position = 21, + keywords = "#WebCategoryOptionsController_keywords", + keywordsCategory = "Custom Web Categories") +public class WebCategoriesOptionsController extends OptionsPanelController { + + private final WebCategoriesDataModel dataModel = WebCategoriesDataModel.getInstance(); + private final WebCategoriesOptionsPanel panel = new WebCategoriesOptionsPanel(dataModel); + + @Override + public void update() { + panel.refresh(); + } + + @Override + public void applyChanges() { + // NO OP since saves happen whenever there is a change. + } + + @Override + public void cancel() { + // NO OP since saves happen whenever there is a change. + } + + @Override + public boolean isValid() { + return true; + } + + @Override + public boolean isChanged() { + return false; + } + + @Override + public JComponent getComponent(Lookup masterLookup) { + return panel; + } + + @Override + public HelpCtx getHelpCtx() { + return null; + } + + @Override + public void addPropertyChangeListener(PropertyChangeListener l) { + // NO OP since saves happen whenever there is a change. + } + + @Override + public void removePropertyChangeListener(PropertyChangeListener l) { + // NO OP since saves happen whenever there is a change. 
+ } + +} diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.form b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.form new file mode 100644 index 0000000000..00dc510d0d --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.form @@ -0,0 +1,214 @@ + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.java b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.java new file mode 100644 index 0000000000..7db16e9d19 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/url/analytics/domaincategorization/WebCategoriesOptionsPanel.java @@ -0,0 +1,576 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.url.analytics.domaincategorization; + +import java.awt.Cursor; +import java.beans.PropertyChangeListener; +import java.io.File; +import java.io.IOException; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import javax.swing.JFileChooser; +import javax.swing.JFrame; +import javax.swing.JOptionPane; +import javax.swing.SwingUtilities; +import javax.swing.filechooser.FileNameExtensionFilter; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.openide.util.NbBundle.Messages; +import org.openide.util.WeakListeners; +import org.sleuthkit.autopsy.corecomponents.OptionsPanel; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult.ResultType; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSettingsPanel; +import org.sleuthkit.autopsy.url.analytics.DomainCategory; + +/** + * The options panel displayed for import, export, and CRUD operations on domain + * categories. 
+ */ +@Messages({ + "WebCategoriesOptionsPanel_categoryTable_suffixColumnName=Domain Suffix", + "WebCategoriesOptionsPanel_categoryTable_categoryColumnName=Category",}) +public class WebCategoriesOptionsPanel extends IngestModuleGlobalSettingsPanel implements OptionsPanel, AutoCloseable { + + private static final Logger logger = Logger.getLogger(WebCategoriesOptionsPanel.class.getName()); + private static final String DEFAULT_EXTENSION = "json"; + private static final FileNameExtensionFilter DB_FILTER = new FileNameExtensionFilter("JSON", DEFAULT_EXTENSION); + + private final JFileChooser fileChooser = new JFileChooser(); + private final WebCategoriesDataModel dataModel; + + private final JTablePanel categoryTable + = JTablePanel.getJTablePanel(Arrays.asList( + new ColumnModel>( + Bundle.WebCategoriesOptionsPanel_categoryTable_suffixColumnName(), + (domCat) -> new DefaultCellModel<>(domCat.getHostSuffix()) + .setTooltip(domCat.getHostSuffix()), + 300 + ), + new ColumnModel<>( + Bundle.WebCategoriesOptionsPanel_categoryTable_categoryColumnName(), + (domCat) -> new DefaultCellModel<>(domCat.getCategory()) + .setTooltip(domCat.getCategory()), + 200 + ) + )).setKeyFunction((domCat) -> domCat.getHostSuffix()); + + private final PropertyChangeListener ingestListener = (evt) -> refreshComponentStates(); + private final PropertyChangeListener weakIngestListener = WeakListeners.propertyChange(ingestListener, this); + private Set domainSuffixes = new HashSet<>(); + private boolean isRefreshing = false; + + /** + * Main constructor. + * + * @param dataModel The data model that interacts with the database. 
+ */ + public WebCategoriesOptionsPanel(WebCategoriesDataModel dataModel) { + initComponents(); + this.dataModel = dataModel; + + fileChooser.addChoosableFileFilter(DB_FILTER); + fileChooser.setFileFilter(DB_FILTER); + categoryTable.setCellListener((evt) -> refreshComponentStates()); + IngestManager.getInstance().addIngestJobEventListener(weakIngestListener); + setDefaultCursor(); + refresh(); + } + + /** + * Returns the items selected in the table or null if no selection. + * + * @return The items selected in the table or null if no selection. + */ + private List getSelected() { + return categoryTable.getSelectedItems(); + } + + /** + * Triggers swing worker to fetch data and show in table. + */ + void refresh() { + isRefreshing = true; + refreshComponentStates(); + categoryTable.showDefaultLoadingMessage(); + new DataFetchWorker>( + (noVal) -> this.dataModel.getRecords(), + (data) -> onRefreshedData(data), + null).execute(); + } + + /** + * When the result of loading the data is returned, this function handles + * updating the GUI. + * + * @param categoriesResult The result of attempting to fetch the data. + */ + private void onRefreshedData(DataFetchResult> categoriesResult) { + categoryTable.showDataFetchResult(categoriesResult); + if (categoriesResult.getResultType() == ResultType.SUCCESS && categoriesResult.getData() != null) { + domainSuffixes = categoriesResult.getData().stream() + .map((dc) -> dc.getHostSuffix()) + .collect(Collectors.toSet()); + } else { + domainSuffixes = new HashSet<>(); + } + isRefreshing = false; + refreshComponentStates(); + } + + /** + * Refreshes the state of the components based on whether or not an item is + * selected as well as whether or not data is loading or ingest is + * happening. + */ + private void refreshComponentStates() { + List selectedItems = getSelected(); + int selectedCount = CollectionUtils.isEmpty(selectedItems) ? 
0 : selectedItems.size(); + boolean isIngestRunning = IngestManager.getInstance().isIngestRunning(); + boolean operationsPermitted = !isIngestRunning && !isRefreshing; + + deleteEntryButton.setEnabled(selectedCount > 0 && operationsPermitted); + editEntryButton.setEnabled(selectedCount == 1 && operationsPermitted); + + newEntryButton.setEnabled(operationsPermitted); + exportSetButton.setEnabled(operationsPermitted); + importSetButton.setEnabled(operationsPermitted); + + ingestRunningWarning.setVisible(isIngestRunning); + } + + /** + * Shows the AddEditCategoryDialog to the user and returns the user-inputted + * DomainCategory or null if nothing was saved. + * + * @param original If editing a value, this is the original value being + * edited. If adding a new value, this should be null. + * @return + */ + private DomainCategory getAddEditValue(DomainCategory original) { + JFrame parent = (this.getRootPane() != null && this.getRootPane().getParent() instanceof JFrame) + ? (JFrame) this.getRootPane().getParent() + : null; + + AddEditCategoryDialog addEditDialog = new AddEditCategoryDialog(parent, domainSuffixes, original); + addEditDialog.setResizable(false); + addEditDialog.setLocationRelativeTo(parent); + addEditDialog.setVisible(true); + addEditDialog.toFront(); + + if (addEditDialog.isChanged()) { + return addEditDialog.getValue(); + } else { + return null; + } + } + + /** + * Set cursor to waiting. + */ + private void setWaitingCursor() { + SwingUtilities.invokeLater(() -> this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR))); + } + + /** + * Set cursor to default. + */ + private void setDefaultCursor() { + SwingUtilities.invokeLater(() -> this.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR))); + } + + /** + * An action for updating or altering data in the custom configuration. + */ + private interface UpdateAction { + + /** + * A runnable action to update custom configuration. 
+ * + * @throws IllegalArgumentException + * @throws IOException + * @throws SQLException + */ + void run() throws IllegalArgumentException, IOException, SQLException; + } + + /** + * Runs an action to update the state of the configuration and runs refresh + * when complete. + * + * @param runnable The runnable action. + * @throws IllegalArgumentException + * @throws IOException + * @throws SQLException + */ + private void runUpdateAction(UpdateAction runnable) throws IllegalArgumentException, IOException, SQLException { + setWaitingCursor(); + runnable.run(); + setDefaultCursor(); + refresh(); + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. + */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + java.awt.GridBagConstraints gridBagConstraints; + + javax.swing.JLabel panelDescription = new javax.swing.JLabel(); + javax.swing.JLabel categoriesTitle = new javax.swing.JLabel(); + javax.swing.JPanel categoryTablePanel = categoryTable; + newEntryButton = new javax.swing.JButton(); + editEntryButton = new javax.swing.JButton(); + deleteEntryButton = new javax.swing.JButton(); + importSetButton = new javax.swing.JButton(); + exportSetButton = new javax.swing.JButton(); + javax.swing.JPanel bottomStrut = new javax.swing.JPanel(); + ingestRunningWarning = new javax.swing.JLabel(); + + setLayout(new java.awt.GridBagLayout()); + + panelDescription.setHorizontalAlignment(javax.swing.SwingConstants.CENTER); + panelDescription.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.panelDescription.text")); // NOI18N + panelDescription.setBorder(javax.swing.BorderFactory.createCompoundBorder(javax.swing.BorderFactory.createEtchedBorder(), javax.swing.BorderFactory.createEmptyBorder(5, 5, 5, 5))); + gridBagConstraints = new 
java.awt.GridBagConstraints(); + gridBagConstraints.gridwidth = 3; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START; + gridBagConstraints.insets = new java.awt.Insets(10, 10, 10, 0); + add(panelDescription, gridBagConstraints); + + categoriesTitle.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.categoriesTitle.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 1; + gridBagConstraints.gridwidth = 3; + gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 0, 0); + add(categoriesTitle, gridBagConstraints); + + categoryTablePanel.setAutoscrolls(true); + categoryTablePanel.setMaximumSize(new java.awt.Dimension(400, 32767)); + categoryTablePanel.setMinimumSize(new java.awt.Dimension(400, 300)); + categoryTablePanel.setPreferredSize(new java.awt.Dimension(400, 600)); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 2; + gridBagConstraints.gridwidth = 3; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(2, 10, 10, 0); + add(categoryTablePanel, gridBagConstraints); + + newEntryButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/add16.png"))); // NOI18N + newEntryButton.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.newEntryButton.text")); // NOI18N + newEntryButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + newEntryButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; 
+ gridBagConstraints.gridy = 3; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 5, 5); + add(newEntryButton, gridBagConstraints); + + editEntryButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/edit16.png"))); // NOI18N + editEntryButton.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.editEntryButton.text")); // NOI18N + editEntryButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + editEntryButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 3; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 5, 5); + add(editEntryButton, gridBagConstraints); + + deleteEntryButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/delete16.png"))); // NOI18N + deleteEntryButton.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.deleteEntryButton.text")); // NOI18N + deleteEntryButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + deleteEntryButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 2; + gridBagConstraints.gridy = 3; + gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 5, 5); + add(deleteEntryButton, gridBagConstraints); + + importSetButton.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/import16.png"))); // NOI18N + importSetButton.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.importSetButton.text")); // NOI18N + importSetButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + importSetButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 4; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 5, 5); + add(importSetButton, gridBagConstraints); + + exportSetButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/export16.png"))); // NOI18N + exportSetButton.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.exportSetButton.text")); // NOI18N + exportSetButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + exportSetButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 4; + gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 5, 5); + add(exportSetButton, gridBagConstraints); + + bottomStrut.setPreferredSize(new java.awt.Dimension(10, 0)); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 3; + gridBagConstraints.gridy = 6; + gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; + gridBagConstraints.weightx = 1.0; + add(bottomStrut, gridBagConstraints); + + ingestRunningWarning.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/warning16.png"))); // NOI18N + ingestRunningWarning.setText(org.openide.util.NbBundle.getMessage(WebCategoriesOptionsPanel.class, "WebCategoriesOptionsPanel.ingestRunningWarning.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 5; + gridBagConstraints.gridwidth = 3; + gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST; + gridBagConstraints.insets = new java.awt.Insets(10, 10, 10, 10); + add(ingestRunningWarning, gridBagConstraints); + }// //GEN-END:initComponents + + private void deleteEntryButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deleteEntryButtonActionPerformed + List selectedItems = getSelected(); + if (!CollectionUtils.isEmpty(selectedItems)) { + setWaitingCursor(); + for (DomainCategory selected : selectedItems) { + if (selected != null && selected.getHostSuffix() != null) { + try { + dataModel.deleteRecord(selected.getHostSuffix()); + } catch (IllegalArgumentException | SQLException ex) { + logger.log(Level.WARNING, "There was an error while deleting: " + selected.getHostSuffix(), ex); + } + } + } + setDefaultCursor(); + refresh(); + } + }//GEN-LAST:event_deleteEntryButtonActionPerformed + + private void newEntryButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newEntryButtonActionPerformed + DomainCategory newCategory = getAddEditValue(null); + if (newCategory != null) { + try { + runUpdateAction(() -> dataModel.insertUpdateSuffix(newCategory)); + } catch (IllegalArgumentException | SQLException | IOException ex) { + setDefaultCursor(); + logger.log(Level.WARNING, "There was an error while adding new record: " + newCategory.getHostSuffix(), ex); + } + } + }//GEN-LAST:event_newEntryButtonActionPerformed + + private void editEntryButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_editEntryButtonActionPerformed + 
List selectedItems = getSelected(); + if (CollectionUtils.isNotEmpty(selectedItems)) { + DomainCategory selected = selectedItems.get(0); + if (selected != null && selected.getHostSuffix() != null) { + try { + DomainCategory newCategory = getAddEditValue(selected); + if (newCategory != null) { + runUpdateAction(() -> dataModel.insertUpdateSuffix(newCategory)); + } + } catch (IllegalArgumentException | SQLException | IOException ex) { + setDefaultCursor(); + logger.log(Level.WARNING, "There was an error while editing: " + selected.getHostSuffix(), ex); + } + } + } + }//GEN-LAST:event_editEntryButtonActionPerformed + + @Messages({ + "WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorMessage=There was an error importing this json file.", + "WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorTitle=Import Error", + "WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictTitle=Domain Suffix Already Exists", + "# {0} - domainSuffix", + "WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictMessage=Domain suffix {0} already exists. What would you like to do?", + "WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictOverwrite=Overwrite", + "WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictSkip=Skip", + "WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictCancel=Cancel"}) + private void importSetButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_importSetButtonActionPerformed + fileChooser.setSelectedFile(new File("")); + int result = fileChooser.showOpenDialog(this); + if (result == JFileChooser.APPROVE_OPTION) { + File selectedFile = fileChooser.getSelectedFile(); + if (selectedFile != null && selectedFile.exists()) { + try { + runUpdateAction(() -> { + List categories = dataModel.getJsonEntries(selectedFile); + + for (DomainCategory domcat : categories) { + String normalizedCategory = domcat == null ? 
"" : WebCategoriesDataModel.getNormalizedCategory(domcat.getCategory()); + String normalizedSuffix = domcat == null ? "" : WebCategoriesDataModel.getNormalizedSuffix(domcat.getHostSuffix()); + + if (StringUtils.isBlank(normalizedCategory) || StringUtils.isBlank(normalizedSuffix)) { + logger.log(Level.WARNING, String.format("Invalid entry [category: %s, domain suffix: %s]", normalizedCategory, normalizedSuffix)); + continue; + } + + DomainCategory currentCategory = dataModel.getRecordBySuffix(normalizedSuffix); + // if a mapping for the domain suffix already exists and the value will change, prompt the user on what to do. + if (currentCategory != null) { + if (normalizedCategory.equalsIgnoreCase(currentCategory.getCategory())) { + // do nothing if import item is same as already present + continue; + } else { + + String[] options = { + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictOverwrite(), + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictSkip(), + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictCancel() + }; + + int optionItem = JOptionPane.showOptionDialog(null, + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictMessage(normalizedSuffix), + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_onConflictTitle(), + JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE, null, options, options[0]); + + switch (optionItem) { + case 0: + break; + case 1: + continue; + case 2: + return; + } + } + } + + dataModel.insertUpdateSuffix(new DomainCategory(normalizedSuffix, normalizedCategory)); + } + }); + } catch (IllegalArgumentException | SQLException | IOException ex) { + setDefaultCursor(); + JOptionPane.showMessageDialog( + this, + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorMessage(), + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorTitle(), + JOptionPane.ERROR_MESSAGE); + logger.log(Level.WARNING, "There 
was an error on import.", ex); + } + } + } + }//GEN-LAST:event_importSetButtonActionPerformed + + @Messages({ + "WebCategoriesOptionsPanel_exportSetButtonActionPerformed_duplicateMessage=A file already exists at the selected path. The categories will not be exported.", + "WebCategoriesOptionsPanel_exportSetButtonActionPerformed_duplicateTitle=File Already Exists", + "WebCategoriesOptionsPanel_exportSetButtonActionPerformed_errorMessage=There was an error exporting.", + "WebCategoriesOptionsPanel_exportSetButtonActionPerformed_errorTitle=Export Error", + "WebCategoriesOptionsPanel_exportSetButtonActionPerformed_defaultFileName=Custom Categories Export" + }) + private void exportSetButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_exportSetButtonActionPerformed + fileChooser.setSelectedFile(new File(String.format("%s.json", Bundle.WebCategoriesOptionsPanel_exportSetButtonActionPerformed_defaultFileName()))); + int result = fileChooser.showSaveDialog(this); + if (result == JFileChooser.APPROVE_OPTION) { + File selectedFile = fileChooser.getSelectedFile(); + if (selectedFile != null) { + if (selectedFile.exists()) { + JOptionPane.showMessageDialog( + this, + Bundle.WebCategoriesOptionsPanel_exportSetButtonActionPerformed_duplicateMessage(), + Bundle.WebCategoriesOptionsPanel_exportSetButtonActionPerformed_duplicateTitle(), + JOptionPane.ERROR_MESSAGE); + return; + } + try { + setWaitingCursor(); + dataModel.exportToJson(selectedFile); + setDefaultCursor(); + } catch (SQLException | IOException ex) { + setDefaultCursor(); + JOptionPane.showMessageDialog( + this, + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorMessage(), + Bundle.WebCategoriesOptionsPanel_importSetButtonActionPerformed_errorTitle(), + JOptionPane.ERROR_MESSAGE); + logger.log(Level.WARNING, "There was an error on export.", ex); + } + } + } + }//GEN-LAST:event_exportSetButtonActionPerformed + + + // Variables declaration - do not modify//GEN-BEGIN:variables + 
private javax.swing.JButton deleteEntryButton; + private javax.swing.JButton editEntryButton; + private javax.swing.JButton exportSetButton; + private javax.swing.JButton importSetButton; + private javax.swing.JLabel ingestRunningWarning; + private javax.swing.JButton newEntryButton; + // End of variables declaration//GEN-END:variables + + @Override + public void saveSettings() { + // NO OP since saves happen whenever there is a change. + } + + @Override + public void store() { + // NO OP since saves happen whenever there is a change. + } + + @Override + public void load() { + refresh(); + } + + @Override + public void close() { + IngestManager.getInstance().removeIngestJobEventListener(weakIngestListener); + } +} diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccountsTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccountsTest.java index a87c2959ba..537835bc01 100755 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccountsTest.java +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccountsTest.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Optional; import junit.framework.Assert; import junit.framework.TestCase; import junit.framework.Test; @@ -104,10 +105,11 @@ public class CentralRepoAccountsTest extends TestCase { if(expectedAccountType == Account.Type.DEVICE) continue; try { - CentralRepoAccountType crAccountType = CentralRepository.getInstance() + Optional optCrAccountType = CentralRepository.getInstance() .getAccountTypeByName(expectedAccountType.getTypeName()); + Assert.assertTrue(optCrAccountType.isPresent()); - Account.Type actualAccountType = crAccountType.getAcctType(); + Account.Type actualAccountType = optCrAccountType.get().getAcctType(); 
Assert.assertEquals(expectedAccountType, actualAccountType); } catch (CentralRepoException ex) { Assert.fail("Didn't expect an exception here. Exception: " + ex); @@ -118,35 +120,34 @@ public class CentralRepoAccountsTest extends TestCase { public void testRejectionOfDeviceAccountType() { try { Account.Type deviceAccount = Account.Type.DEVICE; - CentralRepository.getInstance() + Optional optType = CentralRepository.getInstance() .getAccountTypeByName(deviceAccount.getTypeName()); - Assert.fail("Expected an exception from getAccountTypeByName() when" - + " querying the device account type"); + Assert.assertFalse(optType.isPresent()); } catch (CentralRepoException ex) { - // Pass + Assert.fail("Didn't expect an exception here. Exception: " + ex); } } public void testNonExistentAccountType() { try { - CentralRepository.getInstance() + Optional optType = CentralRepository.getInstance() .getAccountTypeByName("NotARealAccountType"); - Assert.fail("Expected an exception from getAccountTypeByName()" - + " when querying a non-existent account type"); + Assert.assertFalse(optType.isPresent()); } catch (CentralRepoException ex) { - // Pass + Assert.fail("Didn't expect an exception here. Exception: " + ex); } } public void testCreatingAccount() { try { Account.Type facebookAccountType = Account.Type.FACEBOOK; - CentralRepoAccountType expectedAccountType = CentralRepository.getInstance() + Optional optExpectedAccountType = CentralRepository.getInstance() .getAccountTypeByName(facebookAccountType.getTypeName()); + assertTrue(optExpectedAccountType.isPresent()); // Create the account CentralRepository.getInstance() - .getOrCreateAccount(expectedAccountType, "+1 401-231-2552"); + .getOrCreateAccount(optExpectedAccountType.get(), "+1 401-231-2552"); } catch (InvalidAccountIDException | CentralRepoException ex) { Assert.fail("Didn't expect an exception here. 
Exception: " + ex); } @@ -155,19 +156,20 @@ public class CentralRepoAccountsTest extends TestCase { public void testRetreivingAnAccount() { try { Account.Type facebookAccountType = Account.Type.FACEBOOK; - CentralRepoAccountType expectedAccountType = CentralRepository + Optional optExpectedAccountType = CentralRepository .getInstance() .getAccountTypeByName(facebookAccountType.getTypeName()); + assertTrue(optExpectedAccountType.isPresent()); // Create the account CentralRepository.getInstance() - .getOrCreateAccount(expectedAccountType, "+1 441-231-2552"); + .getOrCreateAccount(optExpectedAccountType.get(), "+1 441-231-2552"); // Retrieve the account CentralRepoAccount actualAccount = CentralRepository.getInstance() - .getOrCreateAccount(expectedAccountType, "+1 441-231-2552"); + .getOrCreateAccount(optExpectedAccountType.get(), "+1 441-231-2552"); - Assert.assertEquals(expectedAccountType, actualAccount.getAccountType()); + Assert.assertEquals(optExpectedAccountType.get(), actualAccount.getAccountType()); Assert.assertEquals("+1 441-231-2552", actualAccount.getIdentifier()); } catch (InvalidAccountIDException | CentralRepoException ex) { Assert.fail("Didn't expect an exception here. 
Exception: " + ex); diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java index 199a48b845..47372fcc07 100644 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java @@ -24,6 +24,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.time.Instant; import java.util.Collection; +import java.util.Optional; import junit.framework.Assert; import static junit.framework.Assert.assertTrue; import junit.framework.TestCase; @@ -222,27 +223,39 @@ public class CentralRepoPersonasTest extends TestCase { org2 = CentralRepository.getInstance().newOrganization(org2); // get some correltion types for different account types, for later use - phoneAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.PHONE.getTypeName()); + Optional optType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.PHONE.getTypeName()); + assertTrue(optType.isPresent()); + phoneAccountType = optType.get(); phoneInstanceType = CentralRepository.getInstance().getCorrelationTypeById(phoneAccountType.getCorrelationTypeId()); assertTrue("getCorrelationTypeById(PHONE) returned null", phoneInstanceType != null); - emailAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.EMAIL.getTypeName()); + optType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.EMAIL.getTypeName()); + assertTrue(optType.isPresent()); + emailAccountType = optType.get(); emailInstanceType = CentralRepository.getInstance().getCorrelationTypeById(emailAccountType.getCorrelationTypeId()); assertTrue("getCorrelationTypeById(EMAIL) returned null", emailInstanceType != null); - facebookAccountType = 
CentralRepository.getInstance().getAccountTypeByName( Account.Type.FACEBOOK.getTypeName()); + optType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.FACEBOOK.getTypeName()); + assertTrue(optType.isPresent()); + facebookAccountType = optType.get(); facebookInstanceType = CentralRepository.getInstance().getCorrelationTypeById(facebookAccountType.getCorrelationTypeId()); assertTrue("getCorrelationTypeById(FACEBOOK) returned null", facebookInstanceType != null); - textnowAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.TEXTNOW.getTypeName()); + optType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.TEXTNOW.getTypeName()); + assertTrue(optType.isPresent()); + textnowAccountType = optType.get(); textnowInstanceType = CentralRepository.getInstance().getCorrelationTypeById(textnowAccountType.getCorrelationTypeId()); assertTrue("getCorrelationTypeById(TEXTNOW) returned null", textnowInstanceType != null); - whatsAppAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.WHATSAPP.getTypeName()); + optType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.WHATSAPP.getTypeName()); + assertTrue(optType.isPresent()); + whatsAppAccountType = optType.get(); whatsAppInstanceType = CentralRepository.getInstance().getCorrelationTypeById(whatsAppAccountType.getCorrelationTypeId()); assertTrue("getCorrelationTypeById(WHATSAPP) returned null", whatsAppInstanceType != null); - skypeAccountType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.SKYPE.getTypeName()); + optType = CentralRepository.getInstance().getAccountTypeByName( Account.Type.SKYPE.getTypeName()); + assertTrue(optType.isPresent()); + skypeAccountType = optType.get(); skypeInstanceType = CentralRepository.getInstance().getCorrelationTypeById(skypeAccountType.getCorrelationTypeId()); assertTrue("getCorrelationTypeById(SKYPE) returned null", skypeInstanceType != null); diff --git 
a/CoreLibs/ivy.xml b/CoreLibs/ivy.xml index 7457154839..303f0a04bb 100644 --- a/CoreLibs/ivy.xml +++ b/CoreLibs/ivy.xml @@ -14,7 +14,7 @@ - + diff --git a/CoreLibs/nbproject/project.properties b/CoreLibs/nbproject/project.properties index 3d8ebb951a..60d2362fd8 100644 --- a/CoreLibs/nbproject/project.properties +++ b/CoreLibs/nbproject/project.properties @@ -42,8 +42,8 @@ file.reference.javassist-3.12.1.GA.jar=release/modules/ext/javassist-3.12.1.GA.j file.reference.jfxtras-common-8.0-r4.jar=release/modules/ext/jfxtras-common-8.0-r4.jar file.reference.jfxtras-controls-8.0-r4.jar=release/modules/ext/jfxtras-controls-8.0-r4.jar file.reference.jfxtras-fxml-8.0-r4.jar=release/modules/ext/jfxtras-fxml-8.0-r4.jar -file.reference.jna-5.7.0.jar=release/modules/ext/jna-5.7.0.jar -file.reference.jna-platform-5.7.0.jar=release/modules/ext/jna-platform-5.7.0.jar +file.reference.jna-5.8.0.jar=release/modules/ext/jna-5.8.0.jar +file.reference.jna-platform-5.8.0.jar=release/modules/ext/jna-platform-5.8.0.jar file.reference.joda-time-2.4.jar=release/modules/ext/joda-time-2.4.jar file.reference.jsr305-1.3.9.jar=release/modules/ext/jsr305-1.3.9.jar file.reference.LGoodDatePicker-10.3.1.jar=release/modules/ext/LGoodDatePicker-10.3.1.jar diff --git a/CoreLibs/nbproject/project.xml b/CoreLibs/nbproject/project.xml index 24219c333d..0d3dac9aa3 100644 --- a/CoreLibs/nbproject/project.xml +++ b/CoreLibs/nbproject/project.xml @@ -923,8 +923,8 @@ release/modules/ext/commons-compress-1.18.jar
- ext/jna-platform-5.7.0.jar - release\modules\ext\jna-platform-5.7.0.jar + ext/jna-platform-5.8.0.jar + release\modules\ext\jna-platform-5.8.0.jar ext/opencv-248.jar @@ -951,8 +951,8 @@ release/modules/ext/imageio-bmp-3.2.jar - ext/jna-5.7.0.jar - release\modules\ext\jna-5.7.0.jar + ext/jna-5.8.0.jar + release\modules\ext\jna-5.8.0.jar ext/commons-lang-2.6.jar diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.form b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.form index d54cd080a2..1aa6230020 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.form +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.form @@ -11,107 +11,128 @@ + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.java index 4c03ca839f..91419ef269 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboard.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -21,6 +21,8 @@ package org.sleuthkit.autopsy.experimental.autoingest; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.awt.Cursor; import java.awt.EventQueue; +import java.awt.GridBagConstraints; +import java.awt.Insets; import java.util.Observable; import java.util.Observer; import java.util.concurrent.ScheduledThreadPoolExecutor; @@ -34,6 +36,8 @@ import org.sleuthkit.autopsy.healthmonitor.HealthMonitorDashboard; @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives final class AinStatusDashboard extends javax.swing.JPanel implements Observer { + private static final long serialVersionUID = 1L; + private final AutoIngestMonitor autoIngestMonitor; private final AinStatusPanel nodesPanel; private final static String AIN_REFRESH_THREAD_NAME = "AID-refresh-jobs-%d"; @@ -50,9 +54,17 @@ final class AinStatusDashboard extends javax.swing.JPanel implements Observer { scheduledRefreshThreadPoolExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().setNameFormat(AIN_REFRESH_THREAD_NAME).build()); autoIngestMonitor = monitor; nodesPanel = new AinStatusPanel(); - nodesPanel.setSize(nodesPanel.getSize()); - nodeStatusScrollPane.add(nodesPanel); - nodeStatusScrollPane.setViewportView(nodesPanel); + GridBagConstraints gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 1; + gridBagConstraints.fill = GridBagConstraints.BOTH; + gridBagConstraints.weightx = 1; + gridBagConstraints.weighty = 1; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new Insets(0, 10, 0, 10); + mainPanel.add(nodesPanel, gridBagConstraints); + + refreshTables(); } @@ -79,20 +91,45 @@ final class AinStatusDashboard extends javax.swing.JPanel implements Observer { @SuppressWarnings("unchecked") // 
//GEN-BEGIN:initComponents private void initComponents() { + java.awt.GridBagConstraints gridBagConstraints; + javax.swing.JScrollPane mainScrollPane = new javax.swing.JScrollPane(); + mainPanel = new javax.swing.JPanel(); + nodeStatusTableTitle = new javax.swing.JLabel(); + buttonPanel = new javax.swing.JPanel(); refreshButton = new javax.swing.JButton(); clusterMetricsButton = new javax.swing.JButton(); - nodeStatusScrollPane = new javax.swing.JScrollPane(); - nodeStatusTableTitle = new javax.swing.JLabel(); healthMonitorButton = new javax.swing.JButton(); + filler1 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 0), new java.awt.Dimension(0, 0), new java.awt.Dimension(32767, 0)); + + setLayout(new java.awt.BorderLayout()); + + mainPanel.setLayout(new java.awt.GridBagLayout()); + + nodeStatusTableTitle.setFont(nodeStatusTableTitle.getFont().deriveFont(nodeStatusTableTitle.getFont().getSize()+3f)); + org.openide.awt.Mnemonics.setLocalizedText(nodeStatusTableTitle, org.openide.util.NbBundle.getMessage(AinStatusDashboard.class, "AinStatusDashboard.nodeStatusTableTitle.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; + gridBagConstraints.insets = new java.awt.Insets(10, 10, 5, 10); + mainPanel.add(nodeStatusTableTitle, gridBagConstraints); + + buttonPanel.setLayout(new java.awt.GridBagLayout()); org.openide.awt.Mnemonics.setLocalizedText(refreshButton, org.openide.util.NbBundle.getMessage(AinStatusDashboard.class, "AinStatusDashboard.refreshButton.text")); // NOI18N refreshButton.setToolTipText(org.openide.util.NbBundle.getMessage(AinStatusDashboard.class, "AinStatusDashboard.refreshButton.toolTipText")); // NOI18N + refreshButton.setPreferredSize(new java.awt.Dimension(133, 23)); refreshButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent 
evt) { refreshButtonActionPerformed(evt); } }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + buttonPanel.add(refreshButton, gridBagConstraints); org.openide.awt.Mnemonics.setLocalizedText(clusterMetricsButton, org.openide.util.NbBundle.getMessage(AinStatusDashboard.class, "AinStatusDashboard.clusterMetricsButton.text")); // NOI18N clusterMetricsButton.addActionListener(new java.awt.event.ActionListener() { @@ -100,56 +137,42 @@ final class AinStatusDashboard extends javax.swing.JPanel implements Observer { clusterMetricsButtonActionPerformed(evt); } }); - - nodeStatusTableTitle.setFont(nodeStatusTableTitle.getFont().deriveFont(nodeStatusTableTitle.getFont().getSize()+3f)); - org.openide.awt.Mnemonics.setLocalizedText(nodeStatusTableTitle, org.openide.util.NbBundle.getMessage(AinStatusDashboard.class, "AinStatusDashboard.nodeStatusTableTitle.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 3; + gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + buttonPanel.add(clusterMetricsButton, gridBagConstraints); org.openide.awt.Mnemonics.setLocalizedText(healthMonitorButton, org.openide.util.NbBundle.getMessage(AinStatusDashboard.class, "AinStatusDashboard.healthMonitorButton.text")); // NOI18N - healthMonitorButton.setMaximumSize(new java.awt.Dimension(133, 23)); - healthMonitorButton.setMinimumSize(new java.awt.Dimension(133, 23)); healthMonitorButton.setPreferredSize(new java.awt.Dimension(133, 23)); healthMonitorButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { healthMonitorButtonActionPerformed(evt); } }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 2; + gridBagConstraints.gridy = 0; + 
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + buttonPanel.add(healthMonitorButton, gridBagConstraints); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; + gridBagConstraints.weightx = 1.0; + buttonPanel.add(filler1, gridBagConstraints); - javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); - this.setLayout(layout); - layout.setHorizontalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(nodeStatusScrollPane) - .addGroup(layout.createSequentialGroup() - .addComponent(nodeStatusTableTitle) - .addGap(0, 0, Short.MAX_VALUE)) - .addGroup(layout.createSequentialGroup() - .addComponent(refreshButton, javax.swing.GroupLayout.PREFERRED_SIZE, 100, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 576, Short.MAX_VALUE) - .addComponent(healthMonitorButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(clusterMetricsButton))) - .addContainerGap()) - ); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 2; + gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(10, 10, 10, 10); + mainPanel.add(buttonPanel, gridBagConstraints); - layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {clusterMetricsButton, refreshButton}); + mainScrollPane.setViewportView(mainPanel); - layout.setVerticalGroup( - 
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addGap(40, 40, 40) - .addComponent(nodeStatusTableTitle, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(0, 0, 0) - .addComponent(nodeStatusScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 215, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(382, 382, 382) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(refreshButton) - .addComponent(clusterMetricsButton) - .addComponent(healthMonitorButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addContainerGap()) - ); + add(mainScrollPane, java.awt.BorderLayout.CENTER); }// //GEN-END:initComponents private void refreshButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_refreshButtonActionPerformed @@ -172,9 +195,11 @@ final class AinStatusDashboard extends javax.swing.JPanel implements Observer { // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JPanel buttonPanel; private javax.swing.JButton clusterMetricsButton; + private javax.swing.Box.Filler filler1; private javax.swing.JButton healthMonitorButton; - private javax.swing.JScrollPane nodeStatusScrollPane; + private javax.swing.JPanel mainPanel; private javax.swing.JLabel nodeStatusTableTitle; private javax.swing.JButton refreshButton; // End of variables declaration//GEN-END:variables diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboardTopComponent.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboardTopComponent.java index 090932e154..46704560b3 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboardTopComponent.java +++ 
b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusDashboardTopComponent.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; +import java.awt.BorderLayout; import java.awt.Component; import java.util.List; import java.util.logging.Level; @@ -73,7 +74,8 @@ final class AinStatusDashboardTopComponent extends TopComponent { AinStatusDashboard nodeTab = new AinStatusDashboard(monitor); nodeTab.startUp(); nodeTab.setSize(nodeTab.getPreferredSize()); - tc.add(nodeTab); + tc.setLayout(new BorderLayout()); + tc.add(nodeTab, BorderLayout.CENTER); tc.open(); } } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusNode.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusNode.java index ef8e280d1e..8c7ba7cdd9 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusNode.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AinStatusNode.java @@ -151,7 +151,8 @@ final class AinStatusNode extends AbstractNode { || nodeState.getState() == AutoIngestNodeState.State.RUNNING) { actions.add(new AutoIngestAdminActions.AutoIngestNodeControlAction.PauseResumeAction(nodeState)); } - actions.add(new AutoIngestAdminActions.AutoIngestNodeControlAction.ShutdownAction(nodeState)); + actions.add(new AutoIngestAdminActions.AutoIngestNodeControlAction.ShutdownAction(nodeState)); + actions.add(new AutoIngestAdminActions.AutoIngestNodeControlAction.GenerateThreadDumpControlAction(nodeState)); } return actions.toArray(new Action[actions.size()]); } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAdminActions.java 
b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAdminActions.java index 7a31ab6573..a881a96fb2 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAdminActions.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAdminActions.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -137,6 +137,25 @@ final class AutoIngestAdminActions { } } } + + static final class GenerateThreadDumpControlAction extends AutoIngestNodeControlAction { + + private static final long serialVersionUID = 1L; + + GenerateThreadDumpControlAction(AutoIngestNodeState nodeState) { + super(nodeState, Bundle.AutoIngestAdminActions_getThreadDump_title()); + } + + @Override + public Object clone() throws CloneNotSupportedException { + return super.clone(); //To change body of generated methods, choose Tools | Templates. 
+ } + + @Override + protected void controlAutoIngestNode(AinStatusDashboard dashboard) throws AutoIngestMonitor.AutoIngestMonitorException { + dashboard.getMonitor().generateThreadDump(getNodeState().getName()); + } + } } @NbBundle.Messages({"AutoIngestAdminActions.progressDialogAction.title=Ingest Progress"}) @@ -167,6 +186,41 @@ final class AutoIngestAdminActions { } } + @NbBundle.Messages({"AutoIngestAdminActions.getThreadDump.title=Generate Thread Dump"}) + static final class GenerateThreadDump extends AbstractAction { + + private static final long serialVersionUID = 1L; + private final AutoIngestJob job; + + GenerateThreadDump(AutoIngestJob job) { + super(Bundle.AutoIngestAdminActions_getThreadDump_title()); + this.job = job; + } + + @Override + public void actionPerformed(ActionEvent e) { + + if (job == null) { + return; + } + + final AutoIngestDashboardTopComponent tc = (AutoIngestDashboardTopComponent) WindowManager.getDefault().findTopComponent(AutoIngestDashboardTopComponent.PREFERRED_ID); + if (tc == null) { + return; + } + + AutoIngestDashboard dashboard = tc.getAutoIngestDashboard(); + if (dashboard != null) { + dashboard.getMonitor().generateThreadDump(job.getProcessingHostName()); + } + } + + @Override + public Object clone() throws CloneNotSupportedException { + return super.clone(); //To change body of generated methods, choose Tools | Templates. 
+ } + } + @NbBundle.Messages({"AutoIngestAdminActions.cancelJobAction.title=Cancel Job"}) static final class CancelJobAction extends AbstractAction { diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form index e6aaa64a77..fb0726febc 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.form @@ -22,167 +22,185 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - - - - - - - - - + + + + + + - - - - - + + - - - - - - + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java index 9308e55ad2..075ba9395b 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java @@ -348,8 +348,11 @@ final class AutoIngestDashboard extends JPanel implements Observer { @SuppressWarnings("unchecked") // //GEN-BEGIN:initComponents private void initComponents() { + java.awt.GridBagConstraints gridBagConstraints; jButton1 = new javax.swing.JButton(); + mainScrollPane = new javax.swing.JScrollPane(); + mainPanel = 
new javax.swing.JPanel(); pendingScrollPane = new javax.swing.JScrollPane(); runningScrollPane = new javax.swing.JScrollPane(); completedScrollPane = new javax.swing.JScrollPane(); @@ -362,19 +365,77 @@ final class AutoIngestDashboard extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(jButton1, org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.jButton1.text")); // NOI18N + setLayout(new java.awt.BorderLayout()); + + mainPanel.setLayout(new java.awt.GridBagLayout()); + pendingScrollPane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); pendingScrollPane.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER); pendingScrollPane.setOpaque(false); - pendingScrollPane.setPreferredSize(new java.awt.Dimension(2, 215)); + pendingScrollPane.setPreferredSize(new java.awt.Dimension(2, 150)); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 3; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 10, 10); + mainPanel.add(pendingScrollPane, gridBagConstraints); + + runningScrollPane.setPreferredSize(new java.awt.Dimension(2, 150)); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 5; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 10, 10); + mainPanel.add(runningScrollPane, gridBagConstraints); + + 
completedScrollPane.setPreferredSize(new java.awt.Dimension(2, 150)); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 7; + gridBagConstraints.gridwidth = 5; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 10, 10); + mainPanel.add(completedScrollPane, gridBagConstraints); lbPending.setFont(lbPending.getFont().deriveFont(lbPending.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbPending, org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.lbPending.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 2; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(10, 10, 5, 10); + mainPanel.add(lbPending, gridBagConstraints); lbRunning.setFont(lbRunning.getFont().deriveFont(lbRunning.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbRunning, org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.lbRunning.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 4; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 5, 10); + mainPanel.add(lbRunning, gridBagConstraints); lbCompleted.setFont(lbCompleted.getFont().deriveFont(lbCompleted.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbCompleted, org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.lbCompleted.text")); // 
NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 6; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(0, 10, 5, 10); + mainPanel.add(lbCompleted, gridBagConstraints); org.openide.awt.Mnemonics.setLocalizedText(refreshButton, org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.refreshButton.text")); // NOI18N refreshButton.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.refreshButton.toolTipText")); // NOI18N @@ -383,61 +444,39 @@ final class AutoIngestDashboard extends JPanel implements Observer { refreshButtonActionPerformed(evt); } }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 8; + gridBagConstraints.gridwidth = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(6, 10, 10, 10); + mainPanel.add(refreshButton, gridBagConstraints); lbServicesStatus.setFont(lbServicesStatus.getFont().deriveFont(lbServicesStatus.getFont().getSize()+3f)); org.openide.awt.Mnemonics.setLocalizedText(lbServicesStatus, org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.lbServicesStatus.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.insets = new java.awt.Insets(10, 10, 0, 10); + mainPanel.add(lbServicesStatus, gridBagConstraints); tbServicesStatusMessage.setEditable(false); tbServicesStatusMessage.setFont(tbServicesStatusMessage.getFont().deriveFont(tbServicesStatusMessage.getFont().getStyle() | java.awt.Font.BOLD, tbServicesStatusMessage.getFont().getSize()+1)); 
tbServicesStatusMessage.setText(org.openide.util.NbBundle.getMessage(AutoIngestDashboard.class, "AutoIngestDashboard.tbServicesStatusMessage.text")); // NOI18N tbServicesStatusMessage.setBorder(null); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(10, 0, 0, 10); + mainPanel.add(tbServicesStatusMessage, gridBagConstraints); - javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); - this.setLayout(layout); - layout.setHorizontalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) - .addComponent(pendingScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addComponent(runningScrollPane, javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(completedScrollPane, javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup() - .addComponent(lbServicesStatus) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(tbServicesStatusMessage, javax.swing.GroupLayout.DEFAULT_SIZE, 861, Short.MAX_VALUE)) - .addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) - .addComponent(lbPending, javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(lbCompleted, javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(lbRunning, javax.swing.GroupLayout.Alignment.LEADING)) - .addGap(0, 0, 
Short.MAX_VALUE)) - .addComponent(refreshButton, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.PREFERRED_SIZE, 100, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addContainerGap()) - ); - layout.setVerticalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(lbServicesStatus, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(tbServicesStatusMessage, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(lbPending, javax.swing.GroupLayout.PREFERRED_SIZE, 23, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(1, 1, 1) - .addComponent(pendingScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(lbRunning) - .addGap(1, 1, 1) - .addComponent(runningScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 133, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addComponent(lbCompleted) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(completedScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 179, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(refreshButton) - .addContainerGap()) - ); + mainScrollPane.setViewportView(mainPanel); + + add(mainScrollPane, java.awt.BorderLayout.CENTER); }// //GEN-END:initComponents /** @@ -460,6 +499,8 @@ final class AutoIngestDashboard extends JPanel implements Observer { private javax.swing.JLabel lbPending; private 
javax.swing.JLabel lbRunning; private javax.swing.JLabel lbServicesStatus; + private javax.swing.JPanel mainPanel; + private javax.swing.JScrollPane mainScrollPane; private javax.swing.JScrollPane pendingScrollPane; private javax.swing.JButton refreshButton; private javax.swing.JScrollPane runningScrollPane; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboardTopComponent.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboardTopComponent.java index f21d80fb8d..18a5a85d41 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboardTopComponent.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboardTopComponent.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; +import java.awt.BorderLayout; import java.awt.Component; import java.awt.EventQueue; import java.util.List; @@ -49,7 +50,7 @@ public final class AutoIngestDashboardTopComponent extends TopComponent { public final static String PREFERRED_ID = "AutoIngestDashboardTopComponent"; // NON-NLS private static final Logger logger = Logger.getLogger(AutoIngestDashboardTopComponent.class.getName()); private static boolean topComponentInitialized = false; - + @Messages({ "AutoIngestDashboardTopComponent.exceptionMessage.failedToCreateDashboard=Failed to create Auto Ingest Dashboard.",}) public static void openTopComponent() { @@ -69,13 +70,14 @@ public final class AutoIngestDashboardTopComponent extends TopComponent { * dashboard instance so we don't accumulate them. 
*/ tc.removeAll(); + tc.setLayout(new BorderLayout()); /* * Create a new dashboard instance to ensure we're using the * most recent configuration. */ AutoIngestDashboard dashboard = AutoIngestDashboard.createDashboard(); - tc.add(dashboard); + tc.add(dashboard, BorderLayout.CENTER); dashboard.setSize(dashboard.getPreferredSize()); //if the user has administrator access enabled open the Node Status and cases top components as well if (AutoIngestDashboard.isAdminAutoIngestDashboard()) { diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java index 288eff8d92..ffe8e19f01 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java @@ -367,6 +367,7 @@ final class AutoIngestJobsNode extends AbstractNode { break; case RUNNING_JOB: actions.add(new AutoIngestAdminActions.ProgressDialogAction(jobWrapper.getJob())); + actions.add(new AutoIngestAdminActions.GenerateThreadDump(jobWrapper.getJob())); actions.add(new AutoIngestAdminActions.CancelJobAction(jobWrapper.getJob())); // actions.add(new AutoIngestAdminActions.CancelModuleAction()); break; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index b3a25be677..e3cace62fc 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2019 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -75,6 +75,7 @@ import static org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorC import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; +import org.sleuthkit.autopsy.coreutils.ThreadUtils; import org.sleuthkit.autopsy.events.AutopsyEvent; import org.sleuthkit.autopsy.events.AutopsyEventException; import org.sleuthkit.autopsy.events.AutopsyEventPublisher; @@ -141,6 +142,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen ControlEventType.PAUSE.toString(), ControlEventType.RESUME.toString(), ControlEventType.SHUTDOWN.toString(), + ControlEventType.GENERATE_THREAD_DUMP_REQUEST.toString(), Event.CANCEL_JOB.toString(), Event.REPROCESS_JOB.toString()})); private static final Set INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED); @@ -303,7 +305,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } else if (event instanceof AutoIngestNodeControlEvent) { handleRemoteNodeControlEvent((AutoIngestNodeControlEvent) event); } else if (event instanceof AutoIngestJobCancelEvent) { - handleRemoteJobCancelledEvent((AutoIngestJobCancelEvent) event); + handleRemoteJobCancelEvent((AutoIngestJobCancelEvent) event); } else if (event instanceof AutoIngestJobReprocessEvent) { handleRemoteJobReprocessEvent((AutoIngestJobReprocessEvent) event); } @@ -398,7 +400,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * * @param event */ - private void handleRemoteJobCancelledEvent(AutoIngestJobCancelEvent event) { + private void handleRemoteJobCancelEvent(AutoIngestJobCancelEvent event) { AutoIngestJob job = event.getJob(); if (job != null && 
job.getProcessingHostName().compareToIgnoreCase(LOCAL_HOST_NAME) == 0) { sysLogger.log(Level.INFO, "Received cancel job event for data source {0} in case {1} from user {2} on machine {3}", @@ -491,7 +493,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen private void handleRemoteRequestNodeStateEvent() { // Re-publish last state event. eventPublisher.publishRemotely(lastPublishedStateEvent); - } + } private void handleRemoteNodeControlEvent(AutoIngestNodeControlEvent event) { if (event.getTargetNodeName().compareToIgnoreCase(LOCAL_HOST_NAME) == 0) { @@ -517,12 +519,31 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen setChanged(); notifyObservers(Event.SHUTTING_DOWN); break; + case GENERATE_THREAD_DUMP_REQUEST: + handleRemoteRequestThreadDumpEvent(event); + break; default: sysLogger.log(Level.WARNING, "Received unsupported control event: {0}", event.getControlEventType()); break; } } } + + /** + * Handle a request for a thread dump. + */ + private void handleRemoteRequestThreadDumpEvent(AutoIngestNodeControlEvent event) { + + new Thread(() -> { + sysLogger.log(Level.INFO, "Generating thread dump"); + // generate thread dump + String threadDump = ThreadUtils.generateThreadDump(); + + // publish the thread dump + sysLogger.log(Level.INFO, "Sending thread dump reply to node {0}", event.getOriginatingNodeName()); + eventPublisher.publishRemotely(new ThreadDumpResponseEvent(LOCAL_HOST_NAME, event.getOriginatingNodeName(), threadDump)); + }).start(); + } /** * Shuts down auto ingest. @@ -973,7 +994,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * job to be shut down in an orderly fashion. 
*/ void cancelCurrentJob() { - if (State.RUNNING != state) { + if ((State.RUNNING != state) && (State.SHUTTING_DOWN != state)) { return; } synchronized (jobsLock) { @@ -2564,6 +2585,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen synchronized (ingestLock) { // Try each DSP in decreasing order of confidence for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { + currentJob.setDataSourceProcessor(selectedProcessor); UUID taskId = UUID.randomUUID(); caseForJob.notifyAddingDataSource(taskId); DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock); @@ -3127,7 +3149,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SHUTDOWN, REPORT_STATE, CANCEL_JOB, - REPROCESS_JOB + REPROCESS_JOB, + GENERATE_THREAD_DUMP_RESPONSE } /** diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java index b3d3fb21a8..fbca3f50ee 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,8 +19,15 @@ package org.sleuthkit.autopsy.experimental.autoingest; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import java.awt.Desktop; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; @@ -41,6 +48,8 @@ import org.sleuthkit.autopsy.coordinationservice.CoordinationService; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.coreutils.TimeStampUtils; import org.sleuthkit.autopsy.events.AutopsyEventException; import org.sleuthkit.autopsy.events.AutopsyEventPublisher; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus; @@ -74,7 +83,8 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen AutoIngestManager.Event.STARTING_UP.toString(), AutoIngestManager.Event.SHUTTING_DOWN.toString(), AutoIngestManager.Event.SHUTDOWN.toString(), - AutoIngestManager.Event.RESUMED.toString()})); + AutoIngestManager.Event.RESUMED.toString(), + AutoIngestManager.Event.GENERATE_THREAD_DUMP_RESPONSE.toString()})); private final AutopsyEventPublisher eventPublisher; private CoordinationService coordinationService; private final ScheduledThreadPoolExecutor coordSvcQueryExecutor; @@ -154,6 +164,8 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen handleCaseDeletedEvent((AutoIngestCaseDeletedEvent) event); } else if (event instanceof AutoIngestNodeStateEvent) { 
handleAutoIngestNodeStateEvent((AutoIngestNodeStateEvent) event); + } else if (event instanceof ThreadDumpResponseEvent) { + handleRemoteThreadDumpResponseEvent((ThreadDumpResponseEvent) event); } } @@ -253,6 +265,41 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen // Trigger a dashboard refresh. notifyObservers(oldNodeState == null ? nodeStates.get(event.getNodeName()) : oldNodeState); } + + /** + * Handles thread dump response event. + * + * @param event ThreadDumpResponseEvent + */ + private void handleRemoteThreadDumpResponseEvent(ThreadDumpResponseEvent event) { + if (event.getTargetNodeName().compareToIgnoreCase(LOCAL_HOST_NAME) == 0) { + LOGGER.log(Level.INFO, "Received thread dump response event from machine {0}", event.getOriginalNodeName()); + File dumpFile = createFilePath(event.getOriginalNodeName()).toFile(); + try { + try (BufferedWriter writer = new BufferedWriter(new FileWriter(dumpFile, true))) { + writer.write(event.getThreadDump()); + } + + Desktop.getDesktop().open(dumpFile); + } catch (IOException ex) { + if (dumpFile != null) { + LOGGER.log(Level.WARNING, "Failed to open thread dump file in external viewer: " + dumpFile.getAbsolutePath(), ex); + } else { + LOGGER.log(Level.SEVERE, "Failed to create thread dump file.", ex); + } + } + } + } + + /** + * Create the thread dump file path. + * + * @return Path for dump file. + */ + private Path createFilePath(String nodeName) { + String fileName = "ThreadDumpFromNode_" + nodeName + "_" + TimeStampUtils.createTimeStamp() + ".txt"; + return Paths.get(PlatformUtil.getLogDirectory(), fileName); + } /** * Gets the snapshot of the pending jobs queue for an auto ingest cluster. @@ -694,6 +741,15 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen void shutdownAutoIngestNode(String nodeName) { sendControlEventToNode(ControlEventType.SHUTDOWN, nodeName); } + + /** + * Tell the specified node to generate a thread dump. 
+ * + * @param job + */ + void generateThreadDump(String nodeName) { + sendControlEventToNode(ControlEventType.GENERATE_THREAD_DUMP_REQUEST, nodeName); + } /** * A task that updates the state maintained by the monitor. At present this diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestNodeControlEvent.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestNodeControlEvent.java index fbe7c9671d..6feb35ce63 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestNodeControlEvent.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestNodeControlEvent.java @@ -32,7 +32,8 @@ public final class AutoIngestNodeControlEvent extends AutopsyEvent implements Se public enum ControlEventType { PAUSE, RESUME, - SHUTDOWN + SHUTDOWN, + GENERATE_THREAD_DUMP_REQUEST } private static final long serialVersionUID = 1L; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED index 56a675e256..d74f98b5a7 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED @@ -10,6 +10,7 @@ AinStatusNode.status.title=Status AinStatusNode.status.unknown=Unknown AutoIngestAdminActions.cancelJobAction.title=Cancel Job AutoIngestAdminActions.cancelModuleAction.title=Cancel Module +AutoIngestAdminActions.getThreadDump.title=Generate Thread Dump AutoIngestAdminActions.pause.title=Pause Node AutoIngestAdminActions.progressDialogAction.title=Ingest Progress AutoIngestAdminActions.reprocessJobAction.error=Failed to reprocess job diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.form 
b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.form index 3ade741f25..4e4452485b 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.form +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.form @@ -13,78 +13,79 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.java index dcd7fa1fab..51cc3a805b 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/CasesDashboardTopComponent.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019-2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; +import java.awt.BorderLayout; import java.util.logging.Level; import org.openide.explorer.ExplorerManager; import org.openide.explorer.ExplorerUtils; @@ -95,8 +96,7 @@ public final class CasesDashboardTopComponent extends TopComponent implements Ex explorerManager = new ExplorerManager(); associateLookup(ExplorerUtils.createLookup(explorerManager, getActionMap())); caseBrowserPanel = new MultiUserCasesBrowserPanel(explorerManager, new CasesDashboardCustomizer()); - caseBrowserScrollPane.add(caseBrowserPanel); - caseBrowserScrollPane.setViewportView(caseBrowserPanel); + mainPanel.add(caseBrowserPanel, BorderLayout.CENTER); } @Override @@ -116,18 +116,31 @@ public final class CasesDashboardTopComponent extends TopComponent implements Ex */ // //GEN-BEGIN:initComponents private void initComponents() { + java.awt.GridBagConstraints gridBagConstraints; - refreshButton = new javax.swing.JButton(); caseBrowserScrollPane = new javax.swing.JScrollPane(); + mainPanel = new javax.swing.JPanel(); + bottomPanel = new javax.swing.JPanel(); + buttonPanel = new javax.swing.JPanel(); + refreshButton = new javax.swing.JButton(); deleteOrphanCaseNodesButton = new javax.swing.JButton(); deleteOrphanManifestNodesButton = new javax.swing.JButton(); + setLayout(new java.awt.BorderLayout()); + + mainPanel.setLayout(new java.awt.BorderLayout()); + + bottomPanel.setLayout(new java.awt.GridBagLayout()); + + buttonPanel.setLayout(new java.awt.GridLayout()); + org.openide.awt.Mnemonics.setLocalizedText(refreshButton, org.openide.util.NbBundle.getMessage(CasesDashboardTopComponent.class, "CasesDashboardTopComponent.refreshButton.text")); // NOI18N refreshButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { refreshButtonActionPerformed(evt); } }); + 
buttonPanel.add(refreshButton); org.openide.awt.Mnemonics.setLocalizedText(deleteOrphanCaseNodesButton, org.openide.util.NbBundle.getMessage(CasesDashboardTopComponent.class, "CasesDashboardTopComponent.deleteOrphanCaseNodesButton.text")); // NOI18N deleteOrphanCaseNodesButton.addActionListener(new java.awt.event.ActionListener() { @@ -135,6 +148,7 @@ public final class CasesDashboardTopComponent extends TopComponent implements Ex deleteOrphanCaseNodesButtonActionPerformed(evt); } }); + buttonPanel.add(deleteOrphanCaseNodesButton); org.openide.awt.Mnemonics.setLocalizedText(deleteOrphanManifestNodesButton, org.openide.util.NbBundle.getMessage(CasesDashboardTopComponent.class, "CasesDashboardTopComponent.deleteOrphanManifestNodesButton.text")); // NOI18N deleteOrphanManifestNodesButton.addActionListener(new java.awt.event.ActionListener() { @@ -142,43 +156,21 @@ public final class CasesDashboardTopComponent extends TopComponent implements Ex deleteOrphanManifestNodesButtonActionPerformed(evt); } }); + buttonPanel.add(deleteOrphanManifestNodesButton); - javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); - this.setLayout(layout); - layout.setHorizontalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addComponent(refreshButton) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(deleteOrphanCaseNodesButton) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(deleteOrphanManifestNodesButton) - .addGap(0, 0, Short.MAX_VALUE)) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addComponent(caseBrowserScrollPane) - .addContainerGap()))) - ); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + 
gridBagConstraints.gridy = 0; + gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(10, 10, 10, 10); + bottomPanel.add(buttonPanel, gridBagConstraints); - layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {deleteOrphanCaseNodesButton, deleteOrphanManifestNodesButton, refreshButton}); + mainPanel.add(bottomPanel, java.awt.BorderLayout.SOUTH); - layout.setVerticalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addComponent(caseBrowserScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 246, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(refreshButton) - .addComponent(deleteOrphanCaseNodesButton) - .addComponent(deleteOrphanManifestNodesButton)) - .addContainerGap()) - ); - - layout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {deleteOrphanCaseNodesButton, deleteOrphanManifestNodesButton, refreshButton}); + caseBrowserScrollPane.setViewportView(mainPanel); + add(caseBrowserScrollPane, java.awt.BorderLayout.CENTER); }// //GEN-END:initComponents private void refreshButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_refreshButtonActionPerformed @@ -194,9 +186,12 @@ public final class CasesDashboardTopComponent extends TopComponent implements Ex }//GEN-LAST:event_deleteOrphanManifestNodesButtonActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JPanel bottomPanel; + private javax.swing.JPanel buttonPanel; private javax.swing.JScrollPane caseBrowserScrollPane; private javax.swing.JButton deleteOrphanCaseNodesButton; private javax.swing.JButton deleteOrphanManifestNodesButton; + private javax.swing.JPanel mainPanel; 
private javax.swing.JButton refreshButton; // End of variables declaration//GEN-END:variables diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ThreadDumpResponseEvent.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ThreadDumpResponseEvent.java new file mode 100755 index 0000000000..876d630242 --- /dev/null +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ThreadDumpResponseEvent.java @@ -0,0 +1,55 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.experimental.autoingest; + +import java.io.Serializable; +import org.sleuthkit.autopsy.events.AutopsyEvent; + +/** + * Event published to send thread dump. 
+ */ +public final class ThreadDumpResponseEvent extends AutopsyEvent implements Serializable { + + private static final long serialVersionUID = 1L; + private final String originalNodeName; + private final String targetNodeName; + private final String threadDump; + + public ThreadDumpResponseEvent(String originalNodeName, String targetNodeName, String threadDump) { + super(AutoIngestManager.Event.GENERATE_THREAD_DUMP_RESPONSE.toString(), null, null); + this.originalNodeName = originalNodeName; + this.targetNodeName = targetNodeName; + this.threadDump = threadDump; + } + + String getOriginalNodeName() { + return originalNodeName; + } + + String getTargetNodeName() { + return targetNodeName; + } + + /** + * @return Thread dump + */ + public String getThreadDump() { + return threadDump; + } +} diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties index 1c7d3312da..2de432ff18 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties @@ -10,3 +10,5 @@ MemoryDSInputPanel.errorLabel.text=Error Label MemoryDSInputPanel.browseButton.text=Browse MemoryDSInputPanel.timeZoneLabel.text=Timezone: MemoryDSInputPanel.profileLabel.text=Profile: +MemoryDSInputPanel.selectAllButton.text=Select All +MemoryDSInputPanel.deselectAllButton.text=Deselect All diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties-MERGED index 4173708524..a454855ebf 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/Bundle.properties-MERGED @@ -17,6 +17,8 @@ MemoryDSInputPanel.errorLabel.text=Error 
Label MemoryDSInputPanel.browseButton.text=Browse MemoryDSInputPanel.timeZoneLabel.text=Timezone: MemoryDSInputPanel.profileLabel.text=Profile: +MemoryDSInputPanel.selectAllButton.text=Select All +MemoryDSInputPanel.deselectAllButton.text=Deselect All MemoryDSInputPanel_errorMsg_dataSourcePathOnCdrive=Path to multi-user data source is on "C:" drive MemoryDSInputPanel_errorMsg_noOpenCase=No open case MemoryDSProcessor.dataSourceType=Memory Image File (Volatility) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.form b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.form index f899eeb503..60191d6f48 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.form +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.form @@ -37,6 +37,11 @@ + + + + + @@ -78,7 +83,12 @@ - + + + + + + @@ -188,5 +198,25 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.java b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.java index 3d41adf201..ed7b4be7b5 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/MemoryDSInputPanel.java @@ -191,6 +191,8 @@ final class MemoryDSInputPanel extends JPanel implements DocumentListener { pluginTable = new javax.swing.JTable(); profileLabel = new javax.swing.JLabel(); profileComboBox = new javax.swing.JComboBox<>(); + selectAllButton = new javax.swing.JButton(); + deselectAllButton = new javax.swing.JButton(); org.openide.awt.Mnemonics.setLocalizedText(pathLabel, org.openide.util.NbBundle.getMessage(MemoryDSInputPanel.class, "MemoryDSInputPanel.pathLabel.text")); // NOI18N @@ -238,6 +240,20 @@ final class MemoryDSInputPanel extends JPanel 
implements DocumentListener { } }); + org.openide.awt.Mnemonics.setLocalizedText(selectAllButton, org.openide.util.NbBundle.getMessage(MemoryDSInputPanel.class, "MemoryDSInputPanel.selectAllButton.text")); // NOI18N + selectAllButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + selectAllButtonActionPerformed(evt); + } + }); + + org.openide.awt.Mnemonics.setLocalizedText(deselectAllButton, org.openide.util.NbBundle.getMessage(MemoryDSInputPanel.class, "MemoryDSInputPanel.deselectAllButton.text")); // NOI18N + deselectAllButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + deselectAllButtonActionPerformed(evt); + } + }); + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( @@ -256,7 +272,11 @@ final class MemoryDSInputPanel extends JPanel implements DocumentListener { .addComponent(listsScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 248, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false) .addComponent(profileComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, 243, Short.MAX_VALUE) - .addComponent(timeZoneComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))) + .addComponent(timeZoneComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addGroup(layout.createSequentialGroup() + .addComponent(selectAllButton) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(deselectAllButton))))) .addGap(0, 163, Short.MAX_VALUE)) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -287,7 +307,11 @@ final class MemoryDSInputPanel extends JPanel implements 
DocumentListener { .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(PluginsToRunLabel) .addComponent(listsScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 122, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addContainerGap(73, Short.MAX_VALUE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(selectAllButton) + .addComponent(deselectAllButton)) + .addContainerGap(44, Short.MAX_VALUE)) ); pathLabel.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(MemoryDSInputPanel.class, "MemoryDSInputPanel.pathLabel.AccessibleContext.accessibleName")); // NOI18N @@ -316,9 +340,24 @@ final class MemoryDSInputPanel extends JPanel implements DocumentListener { // TODO add your handling code here: }//GEN-LAST:event_pathTextFieldActionPerformed + private void selectAllButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_selectAllButtonActionPerformed + for(String name : pluginListStates.keySet()) { + pluginListStates.put(name, Boolean.TRUE); + } + tableModel.fireTableDataChanged(); + }//GEN-LAST:event_selectAllButtonActionPerformed + + private void deselectAllButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deselectAllButtonActionPerformed + for(String name : pluginListStates.keySet()) { + pluginListStates.put(name, Boolean.FALSE); + } + tableModel.fireTableDataChanged(); + }//GEN-LAST:event_deselectAllButtonActionPerformed + // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JLabel PluginsToRunLabel; private javax.swing.JButton browseButton; + private javax.swing.JButton deselectAllButton; private javax.swing.JLabel errorLabel; private javax.swing.ButtonGroup infileTypeButtonGroup; private javax.swing.JScrollPane listsScrollPane; @@ -327,6 +366,7 @@ final class MemoryDSInputPanel extends JPanel implements 
DocumentListener { private javax.swing.JTable pluginTable; private javax.swing.JComboBox profileComboBox; private javax.swing.JLabel profileLabel; + private javax.swing.JButton selectAllButton; private javax.swing.JComboBox timeZoneComboBox; private javax.swing.JLabel timeZoneLabel; // End of variables declaration//GEN-END:variables diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/AddDrawableFilesTask.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/AddDrawableFilesTask.java deleted file mode 100755 index 56b707ee49..0000000000 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/AddDrawableFilesTask.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2015-2019 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.imagegallery; - -import org.netbeans.api.progress.ProgressHandle; -import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; -import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData; - -/** - * A task that queries the case database for all files with supported - * image/video mime types or extensions and adds them to the drawables database. 
- */ -class AddDrawableFilesTask extends BulkDrawableFilesTask { - - private final ImageGalleryController controller; - private final DrawableDB taskDB; - - AddDrawableFilesTask(long dataSourceObjId, ImageGalleryController controller) { - super(dataSourceObjId, controller); - this.controller = controller; - this.taskDB = controller.getDrawablesDatabase(); - taskDB.buildFileMetaDataCache(); - } - - @Override - protected void cleanup() { - taskDB.freeFileMetaDataCache(); - // at the end of the task, set the stale status based on the - // cumulative status of all data sources - controller.setModelIsStale(controller.isDataSourcesTableStale()); - } - - @Override - void processFile(AbstractFile f, DrawableDB.DrawableTransaction tr, SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException { - final boolean known = f.getKnown() == TskData.FileKnown.KNOWN; - if (known) { - taskDB.removeFile(f.getId(), tr); //remove known files - } else { - // NOTE: Files are being processed because they have the right MIME type, - // so we do not need to worry about this calculating them - if (FileTypeUtils.hasDrawableMIMEType(f)) { - taskDB.updateFile(DrawableFile.create(f, true, false), tr, caseDbTransaction); - } //unsupported mimtype => analyzed but shouldn't include - else { - taskDB.removeFile(f.getId(), tr); - } - } - } - - @Override - @NbBundle.Messages({ - "AddDrawableFilesTask.populatingDb.status=populating analyzed image/video database" - }) - ProgressHandle getInitialProgressHandle() { - return ProgressHandle.createHandle(Bundle.AddDrawableFilesTask_populatingDb_status(), this); - } -} diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/Bundle.properties-MERGED b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/Bundle.properties-MERGED index 9b75078ae8..678dd85c44 100755 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/Bundle.properties-MERGED +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/Bundle.properties-MERGED @@ -1,12 
+1,11 @@ -AddDrawableFilesTask.populatingDb.status=populating analyzed image/video database -BulkDrawableFilesTask.committingDb.status=committing image/video database -BulkDrawableFilesTask.errPopulating.errMsg=There was an error populating Image Gallery database. -BulkDrawableFilesTask.populatingDb.status=populating analyzed image/video database -BulkDrawableFilesTask.stopCopy.status=Stopping copy to drawable db task. CTL_ImageGalleryAction=Image/Video Gallery CTL_ImageGalleryTopComponent=Image/Video Gallery DrawableDbTask.InnerTask.message.name=status DrawableDbTask.InnerTask.progress.name=progress +DrawableFileUpdateTask_committingDb.status=committing image/video database +DrawableFileUpdateTask_errPopulating_errMsg=There was an error populating Image Gallery database. +DrawableFileUpdateTask_populatingDb_status=populating analyzed image/video database +DrawableFileUpdateTask_stopCopy_status=Stopping copy to drawable db task. ImageGallery.dialogTitle=Image Gallery ImageGallery.showTooManyFiles.contentText=There are too many files in the selected datasource(s) to ensure reasonable performance. ImageGallery.showTooManyFiles.headerText= diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/BulkDrawableFilesTask.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/DrawableFileUpdateTask.java similarity index 54% rename from ImageGallery/src/org/sleuthkit/autopsy/imagegallery/BulkDrawableFilesTask.java rename to ImageGallery/src/org/sleuthkit/autopsy/imagegallery/DrawableFileUpdateTask.java index 95ee3c9852..150aa04145 100755 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/BulkDrawableFilesTask.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/DrawableFileUpdateTask.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2015-2019 Basis Technology Corp. + * Copyright 2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,100 +22,159 @@ import java.sql.SQLException; import java.util.List; import java.util.logging.Level; import org.netbeans.api.progress.ProgressHandle; -import org.openide.util.NbBundle; +import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; +import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** - * An abstract base class for tasks that add or modify the drawables database - * records for multiple drawable files. + * A bulk update task for adding images to the image gallery. */ -@NbBundle.Messages({ - "BulkDrawableFilesTask.committingDb.status=committing image/video database", - "BulkDrawableFilesTask.stopCopy.status=Stopping copy to drawable db task.", - "BulkDrawableFilesTask.errPopulating.errMsg=There was an error populating Image Gallery database." 
-}) -abstract class BulkDrawableFilesTask extends DrawableDbTask { +final class DrawableFileUpdateTask extends DrawableDbTask { + + private static final Logger logger = Logger.getLogger(DrawableFileUpdateTask.class.getName()); - private static final Logger logger = Logger.getLogger(BulkDrawableFilesTask.class.getName()); private static final String MIMETYPE_CLAUSE = "(mime_type LIKE '" //NON-NLS + String.join("' OR mime_type LIKE '", FileTypeUtils.getAllSupportedMimeTypes()) //NON-NLS + "') "; - private final String drawableQuery; - private final ImageGalleryController controller; - private final DrawableDB taskDB; - private final SleuthkitCase tskCase; - private final long dataSourceObjId; - //NON-NLS - BulkDrawableFilesTask(long dataSourceObjId, ImageGalleryController controller) { + private final ImageGalleryController controller; + + /** + * Construct a new task. + * + * @param controller A handle to the IG controller. + */ + DrawableFileUpdateTask(ImageGalleryController controller) { this.controller = controller; - this.taskDB = controller.getDrawablesDatabase(); - this.tskCase = controller.getCaseDatabase(); - this.dataSourceObjId = dataSourceObjId; - drawableQuery = " (data_source_obj_id = " + dataSourceObjId + ") " + } + + @Override + public void run() { + for (Long dataSourceObjId : controller.getStaleDataSourceIds()) { + updateFileForDataSource(dataSourceObjId); + } + } + + /** + * Gets the drawables database that is part of the model for the controller. + * + * @return The the drawable db object. + */ + private DrawableDB getDrawableDB() { + return controller.getDrawablesDatabase(); + } + + /** + * Return the sleuthkit case object for the open case. + * + * @return The case db object. + */ + private SleuthkitCase getCaseDB() { + return controller.getCaseDatabase(); + } + + /** + * Returns a list of files to be processed by the task for the given + * datasource. 
+ * + * @param dataSourceObjId + * @return + * @throws TskCoreException + */ + private List getFilesForDataSource(long dataSourceObjId) throws TskCoreException { + List list = getCaseDB().findAllFilesWhere(getDrawableQuery(dataSourceObjId)); + return list; + + } + + /** + * Process a single file for the IG drawable db. + * + * @param file The file to process. + * @param tr A valid DrawableTransaction object. + * @param caseDbTransaction A valid caseDBTransaction object. + * + * @throws TskCoreException + */ + void processFile(AbstractFile file, DrawableDB.DrawableTransaction tr, SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException { + final boolean known = file.getKnown() == TskData.FileKnown.KNOWN; + if (known) { + getDrawableDB().removeFile(file.getId(), tr); //remove known files + } else { + // NOTE: Files are being processed because they have the right MIME type, + // so we do not need to worry about this calculating them + if (FileTypeUtils.hasDrawableMIMEType(file)) { + getDrawableDB().updateFile(DrawableFile.create(file, true, false), tr, caseDbTransaction); + } //unsupported mimtype => analyzed but shouldn't include + else { + getDrawableDB().removeFile(file.getId(), tr); + } + } + } + + /** + * Returns the image query for the given data source. + * + * @param dataSourceObjId + * + * @return SQL query for given data source. + */ + private String getDrawableQuery(long dataSourceObjId) { + return " (data_source_obj_id = " + dataSourceObjId + ") " + " AND ( meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue() + ")" + " AND ( " + MIMETYPE_CLAUSE //NON-NLS + " OR mime_type LIKE 'video/%' OR mime_type LIKE 'image/%' )" //NON-NLS + " ORDER BY parent_path "; } - /** - * Do any cleanup for this task. 
- */ - abstract void cleanup(); - - abstract void processFile(final AbstractFile f, DrawableDB.DrawableTransaction tr, SleuthkitCase.CaseDbTransaction caseDBTransaction) throws TskCoreException; - - /** - * Gets a list of files to process. - * - * @return list of files to process - * - * @throws TskCoreException - */ - List getFiles() throws TskCoreException { - return tskCase.findAllFilesWhere(drawableQuery); - } - - @Override - @NbBundle.Messages({ - "BulkDrawableFilesTask.populatingDb.status=populating analyzed image/video database" + @Messages({ + "DrawableFileUpdateTask_populatingDb_status=populating analyzed image/video database", + "DrawableFileUpdateTask_committingDb.status=committing image/video database", + "DrawableFileUpdateTask_stopCopy_status=Stopping copy to drawable db task.", + "DrawableFileUpdateTask_errPopulating_errMsg=There was an error populating Image Gallery database." }) - public void run() { + private void updateFileForDataSource(long dataSourceObjId) { ProgressHandle progressHandle = getInitialProgressHandle(); progressHandle.start(); - updateMessage(Bundle.BulkDrawableFilesTask_populatingDb_status() + " (Data Source " + dataSourceObjId + ")"); + updateMessage(Bundle.DrawableFileUpdateTask_populatingDb_status() + " (Data Source " + dataSourceObjId + ")"); + DrawableDB.DrawableTransaction drawableDbTransaction = null; SleuthkitCase.CaseDbTransaction caseDbTransaction = null; boolean hasFilesWithNoMime = true; boolean endedEarly = false; try { + + getDrawableDB().buildFileMetaDataCache(); // See if there are any files in the DS w/out a MIME TYPE hasFilesWithNoMime = controller.hasFilesWithNoMimeType(dataSourceObjId); + //grab all files with detected mime types - final List files = getFiles(); + final List files = getFilesForDataSource(dataSourceObjId); progressHandle.switchToDeterminate(files.size()); - taskDB.insertOrUpdateDataSource(dataSourceObjId, DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS); + 
getDrawableDB().insertOrUpdateDataSource(dataSourceObjId, DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS); updateProgress(0.0); int workDone = 0; // Cycle through all of the files returned and call processFile on each //do in transaction - drawableDbTransaction = taskDB.beginTransaction(); + drawableDbTransaction = getDrawableDB().beginTransaction(); /* * We are going to periodically commit the CaseDB transaction and * sleep so that the user can have Autopsy do other stuff while * these bulk tasks are ongoing. */ int caseDbCounter = 0; + for (final AbstractFile f : files) { + updateMessage(f.getName()); if (caseDbTransaction == null) { - caseDbTransaction = tskCase.beginTransaction(); + caseDbTransaction = getCaseDB().beginTransaction(); } + if (isCancelled() || Thread.interrupted()) { logger.log(Level.WARNING, "Task cancelled or interrupted: not all contents may be transfered to drawable database."); //NON-NLS endedEarly = true; @@ -132,20 +191,19 @@ abstract class BulkDrawableFilesTask extends DrawableDbTask { if ((++caseDbCounter % 200) == 0) { caseDbTransaction.commit(); caseDbTransaction = null; - Thread.sleep(500); // 1/2 second + Thread.sleep(500); // 1/2 millisecond } } progressHandle.finish(); - progressHandle = ProgressHandle.createHandle(Bundle.BulkDrawableFilesTask_committingDb_status()); - updateMessage(Bundle.BulkDrawableFilesTask_committingDb_status() + " (Data Source " + dataSourceObjId + ")"); + progressHandle = ProgressHandle.createHandle(Bundle.DrawableFileUpdateTask_committingDb_status()); + updateMessage(Bundle.DrawableFileUpdateTask_committingDb_status() + " (Data Source " + dataSourceObjId + ")"); updateProgress(1.0); - progressHandle.start(); if (caseDbTransaction != null) { caseDbTransaction.commit(); caseDbTransaction = null; } // pass true so that groupmanager is notified of the changes - taskDB.commitTransaction(drawableDbTransaction, true); + getDrawableDB().commitTransaction(drawableDbTransaction, true); drawableDbTransaction = null; 
} catch (TskCoreException | SQLException | InterruptedException ex) { if (null != caseDbTransaction) { @@ -157,14 +215,14 @@ abstract class BulkDrawableFilesTask extends DrawableDbTask { } if (null != drawableDbTransaction) { try { - taskDB.rollbackTransaction(drawableDbTransaction); + getDrawableDB().rollbackTransaction(drawableDbTransaction); } catch (SQLException ex2) { logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS } } - progressHandle.progress(Bundle.BulkDrawableFilesTask_stopCopy_status()); + progressHandle.progress(Bundle.DrawableFileUpdateTask_stopCopy_status()); logger.log(Level.WARNING, "Stopping copy to drawable db task. Failed to transfer all database contents", ex); //NON-NLS - MessageNotifyUtil.Notify.warn(Bundle.BulkDrawableFilesTask_errPopulating_errMsg(), ex.getMessage()); + MessageNotifyUtil.Notify.warn(Bundle.DrawableFileUpdateTask_errPopulating_errMsg(), ex.getMessage()); endedEarly = true; } finally { progressHandle.finish(); @@ -172,15 +230,27 @@ abstract class BulkDrawableFilesTask extends DrawableDbTask { // if there was cancellation or errors DrawableDB.DrawableDbBuildStatusEnum datasourceDrawableDBStatus = ((hasFilesWithNoMime == true) || (endedEarly == true)) ? 
DrawableDB.DrawableDbBuildStatusEnum.REBUILT_STALE : DrawableDB.DrawableDbBuildStatusEnum.COMPLETE; try { - taskDB.insertOrUpdateDataSource(dataSourceObjId, datasourceDrawableDBStatus); + getDrawableDB().insertOrUpdateDataSource(dataSourceObjId, datasourceDrawableDBStatus); } catch (SQLException ex) { logger.log(Level.SEVERE, String.format("Error updating datasources table (data source object ID = %d, status = %s)", dataSourceObjId, datasourceDrawableDBStatus.toString(), ex)); //NON-NLS } updateMessage(""); updateProgress(-1.0); + + getDrawableDB().freeFileMetaDataCache(); + // at the end of the task, set the stale status based on the + // cumulative status of all data sources + controller.setModelIsStale(controller.isDataSourcesTableStale()); } - cleanup(); + } - abstract ProgressHandle getInitialProgressHandle(); + /** + * Returns a ProgressHandle. + * + * @return A new ProgressHandle. + */ + private ProgressHandle getInitialProgressHandle() { + return ProgressHandle.createHandle(Bundle.DrawableFileUpdateTask_populatingDb_status(), this); + } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/HandleDataSourceAnalysisCompleteTask.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/HandleDataSourceAnalysisCompleteTask.java new file mode 100644 index 0000000000..f40f026c4f --- /dev/null +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/HandleDataSourceAnalysisCompleteTask.java @@ -0,0 +1,64 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.imagegallery; + +import java.sql.SQLException; +import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Task to run when data source analysis is complete. + */ +class HandleDataSourceAnalysisCompleteTask extends DrawableDbTask { + + private final ImageGalleryController controller; + private final long dataSourceObjId; + + private static final Logger logger = Logger.getLogger(HandleDataSourceAnalysisCompleteTask.class.getName()); + + HandleDataSourceAnalysisCompleteTask(long dataSourceObjId, ImageGalleryController controller) { + this.controller = controller; + this.dataSourceObjId = dataSourceObjId; + } + + @Override + public void run() { + controller.getGroupManager().resetCurrentPathGroup(); + try { + DrawableDB drawableDB = controller.getDrawablesDatabase(); + if (drawableDB.getDataSourceDbBuildStatus(dataSourceObjId) == DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS) { + + // If at least one file in CaseDB has mime type, then set to COMPLETE + // Otherwise, back to UNKNOWN since we assume file type module was not run + DrawableDB.DrawableDbBuildStatusEnum datasourceDrawableDBStatus + = controller.hasFilesWithMimeType(dataSourceObjId) + ? 
DrawableDB.DrawableDbBuildStatusEnum.COMPLETE + : DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN; + + drawableDB.insertOrUpdateDataSource(dataSourceObjId, datasourceDrawableDBStatus); + } + drawableDB.freeFileMetaDataCache(); + } catch (TskCoreException | SQLException ex) { + logger.log(Level.WARNING, "Error handling data source analysis completed event", ex); + } + } + +} diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java index 9513163587..3743f64f1c 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java @@ -192,12 +192,15 @@ public final class ImageGalleryController { * @param theCase The case. */ static void shutDownController(Case theCase) { + ImageGalleryController controller = null; synchronized (controllersByCaseLock) { if (controllersByCase.containsKey(theCase.getName())) { - ImageGalleryController controller = controllersByCase.remove(theCase.getName()); - controller.shutDown(); + controller = controllersByCase.remove(theCase.getName()); } } + if (controller != null) { + controller.shutDown(); + } } /** @@ -483,8 +486,7 @@ public final class ImageGalleryController { * */ public void rebuildDrawablesDb() { - // queue a rebuild task for each stale data source - getStaleDataSourceIds().forEach(dataSourceObjId -> queueDBTask(new AddDrawableFilesTask(dataSourceObjId, this))); + queueDBTask(new DrawableFileUpdateTask(this)); } /** @@ -667,7 +669,7 @@ public final class ImageGalleryController { * * @param bgTask */ - public synchronized void queueDBTask(DrawableDbTask bgTask) { + public synchronized void queueDBTask(Runnable bgTask) { if (!dbExecutor.isShutdown()) { incrementQueueSize(); dbExecutor.submit(bgTask).addListener(this::decrementQueueSize, MoreExecutors.directExecutor()); @@ -963,19 +965,7 @@ public final class 
ImageGalleryController { * of the local drawables database. */ if (isListeningEnabled()) { - groupManager.resetCurrentPathGroup(); - if (drawableDB.getDataSourceDbBuildStatus(dataSourceObjId) == DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS) { - - // If at least one file in CaseDB has mime type, then set to COMPLETE - // Otherwise, back to UNKNOWN since we assume file type module was not run - DrawableDB.DrawableDbBuildStatusEnum datasourceDrawableDBStatus - = hasFilesWithMimeType(dataSourceObjId) - ? DrawableDB.DrawableDbBuildStatusEnum.COMPLETE - : DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN; - - drawableDB.insertOrUpdateDataSource(dataSource.getId(), datasourceDrawableDBStatus); - } - drawableDB.freeFileMetaDataCache(); + queueDBTask(new HandleDataSourceAnalysisCompleteTask(dataSourceObjId, this)); } } else if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.REMOTE) { /* diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/Bundle.properties-MERGED b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/Bundle.properties-MERGED index 32b96181cb..8dea95da62 100755 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/Bundle.properties-MERGED +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/Bundle.properties-MERGED @@ -37,7 +37,7 @@ OpenAction.noControllerDialog.text=An initialization error ocurred.\nPlease see OpenAction.notAnalyzedDlg.msg=No image/video files available to display yet.\nPlease run FileType and EXIF ingest modules. OpenAction.openTopComponent.error.message=An error occurred while attempting to open Image Gallery. OpenAction.openTopComponent.error.title=Failed to open Image Gallery -OpenAction.stale.confDlg.msg=The image / video database may be out of date. Do you want to update and listen for further ingest results?\nChoosing 'yes' will update the database and enable listening to future ingests. +OpenAction.stale.confDlg.msg=The image / video database may be out of date. 
Do you want to update and listen for further ingest results?\nChoosing 'yes' will update the database and enable listening to future ingests.\n\nDatabase update status will appear in the lower right corner of the application window. OpenAction.stale.confDlg.title=Image Gallery OpenExternalViewerAction.displayName=External Viewer RedoAction.name=Redo diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java index 92ea324105..5723e8e63e 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/actions/OpenAction.java @@ -40,6 +40,7 @@ import org.openide.awt.ActionID; import org.openide.awt.ActionReference; import org.openide.awt.ActionReferences; import org.openide.awt.ActionRegistration; +import org.openide.util.Exceptions; import org.openide.util.HelpCtx; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; @@ -69,7 +70,8 @@ import org.sleuthkit.datamodel.TskCoreException; @Messages({"CTL_OpenAction=Images/Videos", "OpenAction.stale.confDlg.msg=The image / video database may be out of date. " + "Do you want to update and listen for further ingest results?\n" - + "Choosing 'yes' will update the database and enable listening to future ingests.", + + "Choosing 'yes' will update the database and enable listening to future ingests.\n\n" + + "Database update status will appear in the lower right corner of the application window.", "OpenAction.notAnalyzedDlg.msg=No image/video files available to display yet.\n" + "Please run FileType and EXIF ingest modules.", "OpenAction.stale.confDlg.title=Image Gallery"}) @@ -254,28 +256,7 @@ public final class OpenAction extends CallableSystemAction { // They don't want to rebuild. Just open the UI as is. // NOTE: There could be no data.... 
} else if (answer == ButtonType.YES) { - if (controller.getCase().getCaseType() == Case.CaseType.SINGLE_USER_CASE) { - /* - * For a single-user case, we favor user - * experience, and rebuild the database as soon - * as Image Gallery is enabled for the case. - * - * Turning listening off is necessary in order - * to invoke the listener that will call - * controller.rebuildDB(); - */ - controller.setListeningEnabled(false); - controller.setListeningEnabled(true); - } else { - /* - * For a multi-user case, we favor overall - * performance and user experience, not every - * user may want to review images, so we rebuild - * the database only when a user launches Image - * Gallery. - */ - controller.rebuildDrawablesDb(); - } + controller.rebuildDrawablesDb(); } openTopComponent(); return; diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java index 112c7ab6ec..e520e2d0ec 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableFile.java @@ -100,13 +100,9 @@ public abstract class DrawableFile { private String model; - private final CategoryManager categoryManager; - protected DrawableFile(AbstractFile file, Boolean analyzed) { this.analyzed = new SimpleBooleanProperty(analyzed); this.file = file; - - categoryManager = ImageGalleryController.getController(Case.getCurrentCase()).getCategoryManager(); } public abstract boolean isVideo(); @@ -245,13 +241,19 @@ public abstract class DrawableFile { /** * Update the category property. */ - private void updateCategory() { + private void updateCategory() { try { + ImageGalleryController controllerForCase = ImageGalleryController.getController(Case.getCurrentCaseThrows()); + if (controllerForCase == null) { + // This can only happen during case closing, so return without generating an error. 
+ return; + } + List contentTags = getContentTags(); TagName tag = null; for (ContentTag ct : contentTags) { TagName tagName = ct.getName(); - if (categoryManager.isCategoryTagName(tagName)) { + if (controllerForCase.getCategoryManager().isCategoryTagName(tagName)) { tag = tagName; break; } @@ -259,7 +261,7 @@ public abstract class DrawableFile { categoryTagName.set(tag); } catch (TskCoreException ex) { LOGGER.log(Level.WARNING, "problem looking up category for " + this.getContentPathSafe(), ex); //NON-NLS - } catch (IllegalStateException ex) { + } catch (IllegalStateException | NoCurrentCaseException ex) { // We get here many times if the case is closed during ingest, so don't print out a ton of warnings. } } diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java index f492b5f325..1d7e7bc716 100644 --- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java +++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/Toolbar.java @@ -66,6 +66,7 @@ import org.sleuthkit.autopsy.imagegallery.ImageGalleryController; import org.sleuthkit.autopsy.imagegallery.actions.CategorizeGroupAction; import org.sleuthkit.autopsy.imagegallery.actions.TagGroupAction; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableAttribute; +import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.DrawableGroup; import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupSortBy; import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupViewState; @@ -236,8 +237,8 @@ public class Toolbar extends ToolBar { } private void initDataSourceComboBox() { - dataSourceComboBox.setCellFactory(param -> new DataSourceCell(dataSourcesViewable, controller.getAllDataSourcesDrawableDBStatus())); - dataSourceComboBox.setButtonCell(new DataSourceCell(dataSourcesViewable, 
controller.getAllDataSourcesDrawableDBStatus())); + dataSourceComboBox.setCellFactory(param -> new DataSourceCell(dataSourcesViewable, new HashMap<>())); + dataSourceComboBox.setButtonCell(new DataSourceCell(dataSourcesViewable, new HashMap<>())); dataSourceComboBox.setConverter(new StringConverter>() { @Override public String toString(Optional object) { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties index d2ab5f76c7..7dfee92048 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties @@ -320,3 +320,4 @@ ExtractedContentPanel.pageOfLabel.text=of ExtractedContentPanel.pageCurLabel.text=- ExtractedContentPanel.pagesLabel.text=Page: KeywordSearchGlobalSearchSettingsPanel.ocrCheckBox.text=Enable Optical Character Recognition (OCR) +KeywordSearchGlobalSearchSettingsPanel.limitedOcrCheckbox.text=Only process images which are over 100KB in size or extracted from a document (Beta) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index 72de1c17d8..c60c34bade 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -23,6 +23,7 @@ ExtractAllTermsReport.search.noFilesInIdxMsg2=No files are in index yet. Try aga ExtractAllTermsReport.search.searchIngestInProgressTitle=Keyword Search Ingest in Progress ExtractAllTermsReport.startExport=Starting Unique Word Extraction ExtractedContentPanel.setMarkup.panelTxt=Loading text... 
Please wait +# {0} - Content name ExtractedContentPanel.SetMarkup.progress.loading=Loading text for {0} GlobalEditListPanel.editKeyword.title=Edit Keyword GlobalEditListPanel.warning.text=Boundary characters ^ and $ do not match word boundaries. Consider\nreplacing with an explicit list of boundary characters, such as [ \\.,] @@ -30,6 +31,7 @@ GlobalEditListPanel.warning.title=Warning IndexedText.errorMessage.errorGettingText=Error retrieving indexed text. IndexedText.warningMessage.knownFile=This file is a known file (based on MD5 hash) and does not have indexed text. IndexedText.warningMessage.noTextAvailable=No indexed text for this file. +KeywordSearchGlobalSearchSettingsPanel.customizeComponents.windowsLimitedOCR=Only process images which are over 100KB in size or extracted from a document. (Beta) (Requires Windows 64-bit) KeywordSearchGlobalSearchSettingsPanel.customizeComponents.windowsOCR=Enable Optical Character Recognition (OCR) (Requires Windows 64-bit) KeywordSearchGlobalSettingsPanel.Title=Global Keyword Search Settings KeywordSearchIngestModule.init.badInitMsg=Keyword search server was not properly initialized, cannot run keyword search ingest. @@ -49,7 +51,7 @@ KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
+OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search @@ -400,6 +402,7 @@ ExtractedContentPanel.pageOfLabel.text=of ExtractedContentPanel.pageCurLabel.text=- ExtractedContentPanel.pagesLabel.text=Page: KeywordSearchGlobalSearchSettingsPanel.ocrCheckBox.text=Enable Optical Character Recognition (OCR) +KeywordSearchGlobalSearchSettingsPanel.limitedOcrCheckbox.text=Only process images which are over 100KB in size or extracted from a document (Beta) TextZoomPanel.zoomInButton.text= TextZoomPanel.zoomOutButton.text= TextZoomPanel.zoomResetButton.text=Reset diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.form b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.form index 4625056adf..550ad5d442 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.form +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.form @@ -31,22 +31,17 @@ - - - - - - - - - - + + + + + @@ -54,7 +49,7 @@ - + @@ -68,6 +63,10 @@ + + + + @@ -92,6 +91,8 @@ + + @@ -121,7 +122,7 @@ - + @@ -288,5 +289,15 @@ + + + + + + + + + + diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.java 
b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.java index 0741d8d6bb..b9687a32c2 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchGlobalSearchSettingsPanel.java @@ -35,7 +35,8 @@ import org.sleuthkit.autopsy.keywordsearch.KeywordSearchIngestModule.UpdateFrequ */ @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implements OptionsPanel { - + + private static final long serialVersionUID = 1L; private final Logger logger = Logger.getLogger(KeywordSearchGlobalSearchSettingsPanel.class.getName()); /** @@ -45,17 +46,19 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen initComponents(); customizeComponents(); } - + private void activateWidgets() { skipNSRLCheckBox.setSelected(KeywordSearchSettings.getSkipKnown()); showSnippetsCB.setSelected(KeywordSearchSettings.getShowSnippets()); ocrCheckBox.setSelected(KeywordSearchSettings.getOcrOption()); + limitedOcrCheckbox.setSelected(KeywordSearchSettings.getLimitedOcrOption()); boolean ingestRunning = IngestManager.getInstance().isIngestRunning(); ingestWarningLabel.setVisible(ingestRunning); skipNSRLCheckBox.setEnabled(!ingestRunning); ocrCheckBox.setEnabled(!ingestRunning); + limitedOcrCheckbox.setEnabled(ocrCheckBox.isSelected() && !ingestRunning); setTimeSettingEnabled(!ingestRunning); - + final UpdateFrequency curFreq = KeywordSearchSettings.getUpdateFrequency(); switch (curFreq) { case FAST: @@ -109,6 +112,7 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen timeRadioButton5 = new javax.swing.JRadioButton(); ingestWarningLabel = new javax.swing.JLabel(); ocrCheckBox = new javax.swing.JCheckBox(); + limitedOcrCheckbox = new javax.swing.JCheckBox(); 
skipNSRLCheckBox.setText(org.openide.util.NbBundle.getMessage(KeywordSearchGlobalSearchSettingsPanel.class, "KeywordSearchGlobalSearchSettingsPanel.skipNSRLCheckBox.text")); // NOI18N skipNSRLCheckBox.setToolTipText(org.openide.util.NbBundle.getMessage(KeywordSearchGlobalSearchSettingsPanel.class, "KeywordSearchGlobalSearchSettingsPanel.skipNSRLCheckBox.toolTipText")); // NOI18N @@ -189,6 +193,13 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen } }); + limitedOcrCheckbox.setText(org.openide.util.NbBundle.getMessage(KeywordSearchGlobalSearchSettingsPanel.class, "KeywordSearchGlobalSearchSettingsPanel.limitedOcrCheckbox.text")); // NOI18N + limitedOcrCheckbox.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + limitedOcrCheckboxActionPerformed(evt); + } + }); + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( @@ -203,26 +214,23 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen .addComponent(settingsLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(settingsSeparator, javax.swing.GroupLayout.PREFERRED_SIZE, 326, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addGroup(layout.createSequentialGroup() - .addGap(10, 10, 10) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(skipNSRLCheckBox) - .addComponent(showSnippetsCB) - .addComponent(ocrCheckBox))) .addGroup(layout.createSequentialGroup() .addComponent(informationLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(informationSeparator, javax.swing.GroupLayout.PREFERRED_SIZE, 309, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() - .addGap(10, 10, 10) + .addGap(16, 16, 16) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + 
.addComponent(skipNSRLCheckBox) + .addComponent(showSnippetsCB) + .addComponent(ocrCheckBox) .addGroup(layout.createSequentialGroup() .addComponent(filesIndexedLabel) .addGap(18, 18, 18) .addComponent(filesIndexedValue)) .addComponent(frequencyLabel) .addGroup(layout.createSequentialGroup() - .addGap(10, 10, 10) + .addGap(16, 16, 16) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(timeRadioButton2) .addComponent(timeRadioButton1) @@ -232,7 +240,10 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen .addGroup(layout.createSequentialGroup() .addComponent(chunksLabel) .addGap(18, 18, 18) - .addComponent(chunksValLabel))))) + .addComponent(chunksValLabel)) + .addGroup(layout.createSequentialGroup() + .addGap(16, 16, 16) + .addComponent(limitedOcrCheckbox))))) .addGap(0, 0, Short.MAX_VALUE))) .addContainerGap()) ); @@ -252,6 +263,8 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen .addComponent(showSnippetsCB) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(ocrCheckBox) + .addGap(0, 0, 0) + .addComponent(limitedOcrCheckbox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(frequencyLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) @@ -278,7 +291,7 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen .addComponent(chunksValLabel)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(ingestWarningLabel) - .addContainerGap(43, Short.MAX_VALUE)) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); }// //GEN-END:initComponents @@ -311,9 +324,14 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen }//GEN-LAST:event_timeRadioButton4ActionPerformed private void ocrCheckBoxActionPerformed(java.awt.event.ActionEvent evt) 
{//GEN-FIRST:event_ocrCheckBoxActionPerformed + limitedOcrCheckbox.setEnabled(ocrCheckBox.isSelected()); firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); }//GEN-LAST:event_ocrCheckBoxActionPerformed + private void limitedOcrCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_limitedOcrCheckboxActionPerformed + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); + }//GEN-LAST:event_limitedOcrCheckboxActionPerformed + // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JLabel chunksLabel; private javax.swing.JLabel chunksValLabel; @@ -323,6 +341,7 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen private javax.swing.JLabel informationLabel; private javax.swing.JSeparator informationSeparator; private javax.swing.JLabel ingestWarningLabel; + private javax.swing.JCheckBox limitedOcrCheckbox; private javax.swing.JCheckBox ocrCheckBox; private javax.swing.JLabel settingsLabel; private javax.swing.JSeparator settingsSeparator; @@ -342,13 +361,14 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen KeywordSearchSettings.setUpdateFrequency(getSelectedTimeValue()); KeywordSearchSettings.setShowSnippets(showSnippetsCB.isSelected()); KeywordSearchSettings.setOcrOption(ocrCheckBox.isSelected()); + KeywordSearchSettings.setLimitedOcrOption(limitedOcrCheckbox.isSelected()); } - + @Override public void load() { activateWidgets(); } - + private void setTimeSettingEnabled(boolean enabled) { timeRadioButton1.setEnabled(enabled); timeRadioButton2.setEnabled(enabled); @@ -357,7 +377,7 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen timeRadioButton5.setEnabled(enabled); frequencyLabel.setEnabled(enabled); } - + private UpdateFrequency getSelectedTimeValue() { if (timeRadioButton1.isSelected()) { return UpdateFrequency.FAST; @@ -372,18 +392,19 @@ class KeywordSearchGlobalSearchSettingsPanel extends 
javax.swing.JPanel implemen } return UpdateFrequency.DEFAULT; } - - @NbBundle.Messages({"KeywordSearchGlobalSearchSettingsPanel.customizeComponents.windowsOCR=Enable Optical Character Recognition (OCR) (Requires Windows 64-bit)"}) + + @NbBundle.Messages({"KeywordSearchGlobalSearchSettingsPanel.customizeComponents.windowsOCR=Enable Optical Character Recognition (OCR) (Requires Windows 64-bit)", + "KeywordSearchGlobalSearchSettingsPanel.customizeComponents.windowsLimitedOCR=Only process images which are over 100KB in size or extracted from a document. (Beta) (Requires Windows 64-bit)"}) private void customizeComponents() { - + timeGroup.add(timeRadioButton1); timeGroup.add(timeRadioButton2); timeGroup.add(timeRadioButton3); timeGroup.add(timeRadioButton4); timeGroup.add(timeRadioButton5); - + this.skipNSRLCheckBox.setSelected(KeywordSearchSettings.getSkipKnown()); - + try { filesIndexedValue.setText(Integer.toString(KeywordSearch.getServer().queryNumIndexedFiles())); chunksValLabel.setText(Integer.toString(KeywordSearch.getServer().queryNumIndexedChunks())); @@ -395,15 +416,18 @@ class KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen ocrCheckBox.setText(Bundle.KeywordSearchGlobalSearchSettingsPanel_customizeComponents_windowsOCR()); ocrCheckBox.setSelected(false); ocrCheckBox.setEnabled(false); - } - + limitedOcrCheckbox.setSelected(false); + limitedOcrCheckbox.setEnabled(false); + limitedOcrCheckbox.setText(Bundle.KeywordSearchGlobalSearchSettingsPanel_customizeComponents_windowsLimitedOCR()); + } + KeywordSearch.addNumIndexedFilesChangeListener( new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { String changed = evt.getPropertyName(); Object newValue = evt.getNewValue(); - + if (changed.equals(KeywordSearch.NUM_FILES_CHANGE_EVT)) { int newFilesIndexed = ((Integer) newValue); filesIndexedValue.setText(Integer.toString(newFilesIndexed)); @@ -416,7 +440,7 @@ class 
KeywordSearchGlobalSearchSettingsPanel extends javax.swing.JPanel implemen } } }); - + //allow panel to toggle its enabled status while it is open based on ingest events IngestManager.getInstance().addIngestJobEventListener(new PropertyChangeListener() { @Override diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index 4ef6db2a24..0d8e48e9c4 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -513,6 +513,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule { private boolean extractTextAndIndex(AbstractFile aFile, Map extractedMetadata) throws IngesterException { ImageConfig imageConfig = new ImageConfig(); imageConfig.setOCREnabled(KeywordSearchSettings.getOcrOption()); + imageConfig.setLimitedOCREnabled(KeywordSearchSettings.getLimitedOcrOption()); ProcessTerminator terminator = () -> context.fileIngestIsCancelled(); Lookup extractionContext = Lookups.fixed(imageConfig, terminator); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchSettings.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchSettings.java index 91043ee9a0..0db0b30684 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchSettings.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchSettings.java @@ -39,9 +39,11 @@ class KeywordSearchSettings { static final String PROPERTIES_NSRL = NbBundle.getMessage(KeywordSearchSettings.class, "KeywordSearchSettings.propertiesNSRL.text", MODULE_NAME); static final String PROPERTIES_SCRIPTS = NbBundle.getMessage(KeywordSearchSettings.class, "KeywordSearchSettings.propertiesScripts.text", MODULE_NAME); static final String SHOW_SNIPPETS = "showSnippets"; //NON-NLS - 
static final boolean DEFAULT_SHOW_SNIPPETS = true; + static final boolean DEFAULT_SHOW_SNIPPETS = true; static final String OCR_ENABLED = "ocrEnabled"; //NON-NLS + static final String LIMITED_OCR_ENABLED = "limitedOcrEnabled"; //NON-NLS static final boolean OCR_ENABLED_DEFAULT = false; // NON-NLS + static final boolean LIMITED_OCR_ENABLED_DEFAULT = false; private static boolean skipKnown = true; private static final Logger logger = Logger.getLogger(KeywordSearchSettings.class.getName()); private static UpdateFrequency UpdateFreq = UpdateFrequency.DEFAULT; @@ -130,19 +132,19 @@ class KeywordSearchSettings { stringExtractOptions.put(key, val); ModuleSettings.setConfigSetting(PROPERTIES_OPTIONS, key, val); } - + /** * Save OCR setting to permanent storage - * + * * @param enabled Is OCR enabled? */ static void setOcrOption(boolean enabled) { ModuleSettings.setConfigSetting(PROPERTIES_OPTIONS, OCR_ENABLED, (enabled ? "true" : "false")); //NON-NLS - } + } /** * Get OCR setting from permanent storage - * + * * @return Is OCR enabled? */ static boolean getOcrOption() { @@ -152,7 +154,7 @@ class KeywordSearchSettings { return OCR_ENABLED_DEFAULT; } } - + static void setShowSnippets(boolean showSnippets) { ModuleSettings.setConfigSetting(PROPERTIES_OPTIONS, SHOW_SNIPPETS, (showSnippets ? 
"true" : "false")); //NON-NLS } @@ -248,11 +250,41 @@ class KeywordSearchSettings { if (!ModuleSettings.settingExists(KeywordSearchSettings.PROPERTIES_OPTIONS, OCR_ENABLED)) { logger.log(Level.INFO, "No configuration for OCR found, generating defaults..."); //NON-NLS KeywordSearchSettings.setOcrOption(OCR_ENABLED_DEFAULT); - } + } + //setting OCR default (disabled by default) + if (!ModuleSettings.settingExists(KeywordSearchSettings.PROPERTIES_OPTIONS, LIMITED_OCR_ENABLED)) { + logger.log(Level.INFO, "No configuration for OCR found, generating defaults..."); //NON-NLS + KeywordSearchSettings.setLimitedOcrOption(LIMITED_OCR_ENABLED_DEFAULT); + } //setting default Latin-1 Script if (!ModuleSettings.settingExists(KeywordSearchSettings.PROPERTIES_SCRIPTS, SCRIPT.LATIN_1.name())) { logger.log(Level.INFO, "No configuration for Scripts found, generating defaults..."); //NON-NLS ModuleSettings.setConfigSetting(KeywordSearchSettings.PROPERTIES_SCRIPTS, SCRIPT.LATIN_1.name(), Boolean.toString(true)); } } + + /** + * Enables the limiting OCR to be run on larger images and images which were + * extracted from documents. + * + * @param enabled Flag indicating if OCR is enabled. + */ + static void setLimitedOcrOption(boolean enabled) { + ModuleSettings.setConfigSetting(PROPERTIES_OPTIONS, LIMITED_OCR_ENABLED, (enabled ? "true" : "false")); //NON-NLS + } + + /** + * Gets the limited OCR flag to indicate if OCR should be limited to larger + * images and images which were extracted from documents. + * + * @return Flag indicating if limited OCR is enabled. True if OCR should be + * limited, false otherwise.. 
+ */ + static boolean getLimitedOcrOption() { + if (ModuleSettings.settingExists(PROPERTIES_OPTIONS, LIMITED_OCR_ENABLED)) { + return ModuleSettings.getConfigSetting(PROPERTIES_OPTIONS, LIMITED_OCR_ENABLED).equals("true"); //NON-NLS + } else { + return LIMITED_OCR_ENABLED_DEFAULT; + } + } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index 36ad8c6a29..bc63f4dddf 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2020 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -53,6 +53,8 @@ import java.util.logging.Level; import javax.swing.AbstractAction; import org.apache.commons.io.FileUtils; import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; import org.apache.solr.client.solrj.SolrQuery; @@ -85,6 +87,7 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case.CaseType; import org.sleuthkit.autopsy.casemodule.CaseMetadata; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.Logger; @@ -95,7 +98,6 @@ import org.sleuthkit.autopsy.coreutils.ThreadUtils; import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.TimingMetric; import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException; -import 
org.sleuthkit.autopsy.report.GeneralReportSettings; import org.sleuthkit.autopsy.report.ReportProgressPanel; import org.sleuthkit.datamodel.Content; @@ -2030,6 +2032,13 @@ public class Server { private final List buffer; private final Object bufferLock; + /* (JIRA-7521) Sometimes we get into a situation where Solr server is no longer able to index new data. + * Typically main reason for this is Solr running out of memory. In this case we will stop trying to send new + * data to Solr (for this collection) after certain number of consecutive batches have failed. */ + private static final int MAX_NUM_CONSECUTIVE_FAILURES = 5; + private AtomicInteger numConsecutiveFailures = new AtomicInteger(0); + private AtomicBoolean skipIndexing = new AtomicBoolean(false); + private final ScheduledThreadPoolExecutor periodicTasksExecutor; private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10; private static final int NUM_BATCH_UPDATE_RETRIES = 10; @@ -2076,6 +2085,11 @@ public class Server { @Override public void run() { + + if (skipIndexing.get()) { + return; + } + List clone; synchronized (bufferLock) { @@ -2242,6 +2256,10 @@ public class Server { * @throws KeywordSearchModuleException */ void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException { + + if (skipIndexing.get()) { + return; + } List clone; synchronized (bufferLock) { @@ -2268,6 +2286,10 @@ public class Server { * * @throws KeywordSearchModuleException */ + @NbBundle.Messages({ + "Collection.unableToIndexData.error=Unable to add data to text index. 
All future text indexing for the current case will be skipped.", + + }) private void sendBufferedDocs(List docBuffer) throws KeywordSearchModuleException { if (docBuffer.isEmpty()) { @@ -2293,6 +2315,7 @@ public class Server { } } if (success) { + numConsecutiveFailures.set(0); if (reTryAttempt > 0) { logger.log(Level.INFO, "Batch update suceeded after {0} re-try", reTryAttempt); //NON-NLS } @@ -2304,10 +2327,29 @@ public class Server { throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS } catch (Exception ex) { // Solr throws a lot of unexpected exception types + numConsecutiveFailures.incrementAndGet(); logger.log(Level.SEVERE, "Could not add batched documents to index", ex); //NON-NLS + + // display message to user that that a document batch is missing from the index + MessageNotifyUtil.Notify.error( + NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), + NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); throw new KeywordSearchModuleException( NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS } finally { + if (numConsecutiveFailures.get() >= MAX_NUM_CONSECUTIVE_FAILURES) { + // skip all future indexing + skipIndexing.set(true); + logger.log(Level.SEVERE, "Unable to add data to text index. 
All future text indexing for the current case will be skipped!"); //NON-NLS + + // display message to user that no more data will be added to the index + MessageNotifyUtil.Notify.error( + NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), + Bundle.Collection_unableToIndexData_error()); + if (RuntimeProperties.runningWithGUI()) { + MessageNotifyUtil.Message.error(Bundle.Collection_unableToIndexData_error()); + } + } docBuffer.clear(); } } diff --git a/NEWS.txt b/NEWS.txt index d437c75034..d9e2f99b90 100644 --- a/NEWS.txt +++ b/NEWS.txt @@ -1,3 +1,38 @@ +---------------- VERSION 4.19.0 -------------- +Data Source Management: +- To make managing big cases easier, all data sources are now associated with a host that can be specified in the “Add Data Source” wizard. +- Hosts can be grouped by “person”, which is simply a name of the owner. +- The main tree viewer can be configured to group by person and host. + +OS Accounts: +- Operating System (OS) accounts and realms are their own data types and not generic artifacts. +- OS Accounts are created for Windows accounts found in the registry. Domain-scoped realms are not fully detected yet. +- NTFS files are associated with OS Accounts by SID. +- The Recent Activity module associates artifacts with OS Accounts based on SID or path of database. Other modules still need to be updated. +- OS accounts appear in a dedicated sub-tree of the main tree view and their properties can be viewed in the results view. +- A new content viewer in the lower right area of the main window was built to display OS account data for the item selected in the result view. + +Discovery UI: +- Domain categorization and account types are displayed in Domain Discovery results. +- The Domain Discovery results view more explicitly shows when a downloaded file no longer exists. +- Check boxes are now used to select search options instead of shift-based multi-select. 
+ +Ingest Modules: +- File metadata updates are batched up before being saved to the case database for better performance. +- Parsing of iLEAPP and aLEAPP output was expanded to create communication relationships which can be displayed in the Communications UI. +- EML email parsing handles EML messages that are attachments (and have their own attachments). +- Domain categorization within Recent Activity can be customized by user-defined rules that can be imported and exported. + +Miscellaneous: +- A “Reset Windows” feature was created to help redock windows. +- A case-insensitive wordlist of all words in the keyword search index can be exported as a text document. +- Information from the Data Source Summary panels can be exported as an Excel spreadsheet. +- More artifacts are added to the timeline and artifacts with multiple time-based attributes are mapped to multiple timeline events. +- The Auto Ingest Dashboard is resizable. +- Added option to only perform optical character recognition on certain file types. +- Heap dumps can be saved to a custom location. +- Assorted bug fixes are included. + ---------------- VERSION 4.18.0 -------------- Keyword Search: - A major upgrade from Solr 4 to Solr 8.6.3. Single user cases continue to use the embedded server. @@ -33,6 +68,7 @@ Reporting: Misc: - Added support for Ext4 inline data and sparse blocks (via TSK fix). +- Fixed timeline controller deadlock issue - Updated PostgreSQL JDBC driver to support any recent version of PostgreSQL for multi-user cases and PostgreSQL Central Repository. - Added personas to the summary viewer in CVT. - Handling of bad characters in auto ingest manifest files. 
diff --git a/RecentActivity/nbproject/project.properties b/RecentActivity/nbproject/project.properties index 9736070e53..aab9fa2a60 100644 --- a/RecentActivity/nbproject/project.properties +++ b/RecentActivity/nbproject/project.properties @@ -1,3 +1,4 @@ +file.reference.Rejistry-1.1-SNAPSHOT.jar=release/modules/ext/Rejistry-1.1-SNAPSHOT.jar javac.source=1.8 javac.compilerargs=-Xlint -Xlint:-serial license.file=../LICENSE-2.0.txt diff --git a/RecentActivity/nbproject/project.xml b/RecentActivity/nbproject/project.xml index af8c2edace..8fc5e13b53 100644 --- a/RecentActivity/nbproject/project.xml +++ b/RecentActivity/nbproject/project.xml @@ -74,6 +74,10 @@ + + ext/Rejistry-1.1-SNAPSHOT.jar + release/modules/ext/Rejistry-1.1-SNAPSHOT.jar + diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties index 3ddcf3d7f3..27440f0cdf 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties @@ -1,5 +1,5 @@ OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n\The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. 
+OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module Chrome.moduleName=Chromium diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 77d6eb550f..b796a16d26 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -16,6 +16,7 @@ DataSourceUsage_FlashDrive=Flash Drive # {0} - OS name DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0}) DataSourceUsageAnalyzer.parentModuleName=Recent Activity +DefaultPriorityDomainCategorizer_searchEngineCategory=Search Engine DomainCategoryRunner_moduleName_text=DomainCategoryRunner DomainCategoryRunner_parentModuleName=Recent Activity DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types @@ -88,7 +89,7 @@ ExtractZone_progress_Msg=Extracting :Zone.Identifer files ExtractZone_Restricted=Restricted Sites Zone ExtractZone_Trusted=Trusted Sites Zone OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\nThe module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- 
Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. +OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module Chrome.moduleName=Chromium diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 5b1585b350..292d0b9fa0 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -55,9 +55,11 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DerivedFile; +import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.TimeUtilities; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -173,8 +175,8 @@ final class ChromeCacheExtractor { fileManager = currentCase.getServices().getFileManager(); // Create an output folder to save 
any derived files - absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName); - relOutputFolderName = Paths.get( RAImageIngestModule.getRelModuleOutputPath(), moduleName).normalize().toString(); + absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName, context.getJobId()); + relOutputFolderName = Paths.get(RAImageIngestModule.getRelModuleOutputPath(currentCase, moduleName, context.getJobId())).normalize().toString(); File dir = new File(absOutputFolderName); if (dir.exists() == false) { @@ -204,7 +206,7 @@ final class ChromeCacheExtractor { outDir.mkdirs(); } - String cacheTempPath = RAImageIngestModule.getRATempPath(currentCase, moduleName) + cachePath; + String cacheTempPath = RAImageIngestModule.getRATempPath(currentCase, moduleName, context.getJobId()) + cachePath; File tempDir = new File(cacheTempPath); if (tempDir.exists() == false) { tempDir.mkdirs(); @@ -220,7 +222,7 @@ final class ChromeCacheExtractor { private void cleanup () { for (Entry entry : this.fileCopyCache.entrySet()) { - Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(currentCase, moduleName), entry.getKey() ); + Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(currentCase, moduleName, context.getJobId()), entry.getKey() ); try { entry.getValue().getFileCopy().getChannel().close(); entry.getValue().getFileCopy().close(); @@ -281,7 +283,9 @@ final class ChromeCacheExtractor { return; } - processCacheFolder(indexFile); + if (indexFile.getSize() > 0) { + processCacheFolder(indexFile); + } } } catch (TskCoreException ex) { @@ -521,33 +525,31 @@ final class ChromeCacheExtractor { private void addArtifacts(CacheEntry cacheEntry, AbstractFile cacheEntryFile, AbstractFile cachedItemFile, Collection artifactsAdded) throws TskCoreException { // Create a TSK_WEB_CACHE entry with the parent as data_X file that had the cache entry - BlackboardArtifact webCacheArtifact = cacheEntryFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); 
- if (webCacheArtifact != null) { - Collection webAttr = new ArrayList<>(); - String url = cacheEntry.getKey() != null ? cacheEntry.getKey() : ""; - webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - moduleName, url)); - webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - moduleName, NetworkUtils.extractDomain(url))); - webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - moduleName, cacheEntry.getCreationTime())); - webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HEADERS, - moduleName, cacheEntry.getHTTPHeaders())); - webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, - moduleName, cachedItemFile.getUniquePath())); - webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, - moduleName, cachedItemFile.getId())); - webCacheArtifact.addAttributes(webAttr); - artifactsAdded.add(webCacheArtifact); + Collection webAttr = new ArrayList<>(); + String url = cacheEntry.getKey() != null ? 
cacheEntry.getKey() : ""; + webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + moduleName, url)); + webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + moduleName, NetworkUtils.extractDomain(url))); + webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, + moduleName, cacheEntry.getCreationTime())); + webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HEADERS, + moduleName, cacheEntry.getHTTPHeaders())); + webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, + moduleName, cachedItemFile.getUniquePath())); + webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, + moduleName, cachedItemFile.getId())); - // Create a TSK_ASSOCIATED_OBJECT on the f_XXX or derived file file back to the CACHE entry - BlackboardArtifact associatedObjectArtifact = cachedItemFile.newArtifact(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); - if (associatedObjectArtifact != null) { - associatedObjectArtifact.addAttribute( - new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, - moduleName, webCacheArtifact.getArtifactID())); - artifactsAdded.add(associatedObjectArtifact); - } + BlackboardArtifact webCacheArtifact = cacheEntryFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_CACHE), webAttr); + artifactsAdded.add(webCacheArtifact); + + // Create a TSK_ASSOCIATED_OBJECT on the f_XXX or derived file file back to the CACHE entry + BlackboardArtifact associatedObjectArtifact = cachedItemFile.newArtifact(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); + if (associatedObjectArtifact != null) { + associatedObjectArtifact.addAttribute( + new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, + moduleName, webCacheArtifact.getArtifactID())); + artifactsAdded.add(associatedObjectArtifact); } } @@ -647,7 +649,7 @@ final class ChromeCacheExtractor { // write the 
file to disk so that we can have a memory-mapped ByteBuffer AbstractFile cacheFile = abstractFileOptional.get(); RandomAccessFile randomAccessFile = null; - String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + cacheFolderName + cacheFile.getName(); //NON-NLS + String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName, context.getJobId()) + cacheFolderName + cacheFile.getName(); //NON-NLS try { File newFile = new File(tempFilePathname); ContentUtils.writeToFile(cacheFile, newFile, context::dataSourceIngestIsCancelled); @@ -1034,6 +1036,9 @@ final class ChromeCacheExtractor { this.data = new byte [length]; ByteBuffer buf = cacheFileCopy.getByteBuffer(); int dataOffset = DATAFILE_HDR_SIZE + cacheAddress.getStartBlock() * cacheAddress.getBlockSize(); + if (dataOffset > buf.capacity()) { + return; + } buf.position(dataOffset); buf.get(data, 0, length); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java index 35417a9aa2..c8a3bb64cd 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java @@ -54,6 +54,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; @@ -90,6 +91,10 @@ class Chromium extends Extract { private static final String LOGIN_DATA_FILE_NAME = "Login Data"; private static final String WEB_DATA_FILE_NAME = "Web Data"; private static final String UC_BROWSER_NAME = "UC Browser"; + private static final 
String ENCRYPTED_FIELD_MESSAGE = "The data was encrypted."; + + private Boolean databaseEncrypted = false; + private Boolean fieldEncrypted = false; private final Logger logger = Logger.getLogger(this.getClass().getName()); private Content dataSource; @@ -121,7 +126,7 @@ class Chromium extends Extract { "Progress_Message_Chrome_Cache=Chrome Cache",}) Chromium() { - moduleName = NbBundle.getMessage(Chromium.class, "Chrome.moduleName"); + super(NbBundle.getMessage(Chromium.class, "Chrome.moduleName")); } @Override @@ -129,42 +134,43 @@ class Chromium extends Extract { this.dataSource = dataSource; this.context = context; dataFound = false; + long ingestJobId = context.getJobId(); for (Map.Entry browser : BROWSERS_MAP.entrySet()) { String browserName = browser.getKey(); String browserLocation = browser.getValue(); progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_History", browserName)); - this.getHistory(browser.getKey(), browser.getValue()); + this.getHistory(browser.getKey(), browser.getValue(), ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_Bookmarks", browserName)); - this.getBookmark(browser.getKey(), browser.getValue()); + this.getBookmark(browser.getKey(), browser.getValue(), ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_Cookies", browserName)); - this.getCookie(browser.getKey(), browser.getValue()); + this.getCookie(browser.getKey(), browser.getValue(), ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_Logins", browserName)); - this.getLogins(browser.getKey(), browser.getValue()); + this.getLogins(browser.getKey(), browser.getValue(), ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } 
progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_AutoFill", browserName)); - this.getAutofill(browser.getKey(), browser.getValue()); + this.getAutofill(browser.getKey(), browser.getValue(), ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_Downloads", browserName)); - this.getDownload(browser.getKey(), browser.getValue()); + this.getDownload(browser.getKey(), browser.getValue(), ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } @@ -178,8 +184,11 @@ class Chromium extends Extract { /** * Query for history databases and add artifacts + * @param browser + * @param browserLocation + * @param ingestJobId The ingest job id. */ - private void getHistory(String browser, String browserLocation) { + private void getHistory(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List historyFiles; String historyFileName = HISTORY_FILE_NAME; @@ -214,7 +223,7 @@ class Chromium extends Extract { Collection bbartifacts = new ArrayList<>(); int j = 0; while (j < allocatedHistoryFiles.size()) { - String temps = RAImageIngestModule.getRATempPath(currentCase, browser) + File.separator + allocatedHistoryFiles.get(j).getName() + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, browser, ingestJobId) + File.separator + allocatedHistoryFiles.get(j).getName() + j + ".db"; //NON-NLS final AbstractFile historyFile = allocatedHistoryFiles.get(j++); if ((historyFile.getSize() == 0) || (historyFile.getName().toLowerCase().contains("-slack")) || (historyFile.getName().toLowerCase().contains("cache")) || (historyFile.getName().toLowerCase().contains("media")) @@ -243,9 +252,9 @@ class Chromium extends Extract { } List> tempList; tempList = this.dbConnect(temps, HISTORY_QUERY); - logger.log(Level.INFO, "{0}- Now getting history from {1} 
with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); //NON-NLS @@ -264,9 +273,10 @@ class Chromium extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), (NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create history artifact for file (%d)", historyFile.getId()), ex); } } dbFile.delete(); @@ -279,8 +289,11 @@ class Chromium extends Extract { /** * Search for bookmark files and make artifacts. + * @param browser + * @param browserLocation + * @param ingestJobId The ingest job id. 
*/ - private void getBookmark(String browser, String browserLocation) { + private void getBookmark(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List bookmarkFiles; String bookmarkFileName = BOOKMARK_FILE_NAME; @@ -313,7 +326,7 @@ class Chromium extends Extract { || (bookmarkFile.getName().toLowerCase().contains("bak")) || (bookmarkFile.getParentPath().toLowerCase().contains("backup"))) { continue; } - String temps = RAImageIngestModule.getRATempPath(currentCase, browser) + File.separator + bookmarkFile.getName() + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, browser, ingestJobId) + File.separator + bookmarkFile.getName() + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(bookmarkFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -330,7 +343,7 @@ class Chromium extends Extract { continue; } - logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{moduleName, temps}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getName(), temps}); //NON-NLS File dbFile = new File(temps); if (context.dataSourceIngestIsCancelled()) { dbFile.delete(); @@ -390,29 +403,25 @@ class Chromium extends Extract { date = Long.valueOf(0); } String domain = NetworkUtils.extractDomain(url); - try { - BlackboardArtifact bbart = bookmarkFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK); - Collection bbattributes = new ArrayList<>(); - //TODO Revisit usage of deprecated constructor as per TSK-583 - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), url)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE, - RecentActivityExtracterModuleFactory.getModuleName(), name)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - 
RecentActivityExtracterModuleFactory.getModuleName(), (date / 1000000) - Long.valueOf("11644473600"))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), browser)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), domain)); - bbart.addAttributes(bbattributes); + Collection bbattributes = new ArrayList<>(); + //TODO Revisit usage of deprecated constructor as per TSK-583 + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), url)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE, + RecentActivityExtracterModuleFactory.getModuleName(), name)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, + RecentActivityExtracterModuleFactory.getModuleName(), (date / 1000000) - Long.valueOf("11644473600"))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), browser)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), domain)); - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(TSK_WEB_BOOKMARK, bookmarkFile, bbattributes)); } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error while trying to insert Chrome bookmark artifact{0}", ex); //NON-NLS - this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile4", - this.getName(), bookmarkFile.getName())); + logger.log(Level.SEVERE, String.format("Failed to create bookmark artifact for file (%d)", bookmarkFile.getId()), ex); } + } if(!context.dataSourceIngestIsCancelled()) { @@ -425,8 +434,11 @@ class Chromium extends Extract { /** * Queries for cookie files and adds artifacts + * @param browser + * @param browserLocation + * 
@param ingestJobId The ingest job id. */ - private void getCookie(String browser, String browserLocation) { + private void getCookie(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List cookiesFiles; @@ -458,7 +470,7 @@ class Chromium extends Extract { if ((cookiesFile.getSize() == 0) || (cookiesFile.getName().toLowerCase().contains("-slack"))) { continue; } - String temps = RAImageIngestModule.getRATempPath(currentCase, browser) + File.separator + cookiesFile.getName() + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, browser, ingestJobId) + File.separator + cookiesFile.getName() + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(cookiesFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -481,7 +493,7 @@ class Chromium extends Extract { } List> tempList = this.dbConnect(temps, COOKIE_QUERY); - logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, @@ -504,9 +516,10 @@ class Chromium extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), domain)); - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create 
cookie artifact for file (%d)", cookiesFile.getId()), ex); } } @@ -520,8 +533,11 @@ class Chromium extends Extract { /** * Queries for download files and adds artifacts + * @param browser + * @param browserLocation + * @param ingestJobId The ingest job id. */ - private void getDownload(String browser, String browserLocation) { + private void getDownload(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List downloadFiles; String historyFileName = HISTORY_FILE_NAME; @@ -552,7 +568,7 @@ class Chromium extends Extract { continue; } - String temps = RAImageIngestModule.getRATempPath(currentCase, browser) + File.separator + downloadFile.getName() + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, browser, ingestJobId) + File.separator + downloadFile.getName() + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(downloadFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -582,7 +598,7 @@ class Chromium extends Extract { tempList = this.dbConnect(temps, DOWNLOAD_QUERY_V30); } - logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); String fullPath = result.get("full_path").toString(); //NON-NLS @@ -610,25 +626,17 @@ class Chromium extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), browser)); - BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes); - if (webDownloadArtifact != null) { - 
bbartifacts.add(webDownloadArtifact); - // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. - try { - String normalizedFullPath = FilenameUtils.normalize(fullPath, true); - for (AbstractFile downloadedFile : fileManager.findFiles(dataSource, FilenameUtils.getName(normalizedFullPath), FilenameUtils.getPath(normalizedFullPath))) { - BlackboardArtifact associatedObjectArtifact = downloadedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); - associatedObjectArtifact.addAttribute( - new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, - RecentActivityExtracterModuleFactory.getModuleName(), webDownloadArtifact.getArtifactID())); - - bbartifacts.add(associatedObjectArtifact); - break; - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Error creating associated object artifact for file '%s'", fullPath), ex); //NON-NLS + try { + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes); + bbartifacts.add(webDownloadArtifact); + String normalizedFullPath = FilenameUtils.normalize(fullPath, true); + for (AbstractFile downloadedFile : fileManager.findFiles(dataSource, FilenameUtils.getName(normalizedFullPath), FilenameUtils.getPath(normalizedFullPath))) { + bbartifacts.add(createAssociatedArtifact(downloadedFile, webDownloadArtifact)); + break; } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Error creating associated object artifact for file '%s'", fullPath), ex); //NON-NLS } } @@ -642,8 +650,11 @@ class Chromium extends Extract { /** * Gets user logins from Login Data sqlite database + * @param browser + * @param browserLocation + * @param ingestJobId The ingest job id. 
*/ - private void getLogins(String browser, String browserLocation) { + private void getLogins(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List loginDataFiles; @@ -674,7 +685,7 @@ class Chromium extends Extract { if ((loginDataFile.getSize() == 0) || (loginDataFile.getName().toLowerCase().contains("-slack"))) { continue; } - String temps = RAImageIngestModule.getRATempPath(currentCase, browser) + File.separator + loginDataFile.getName() + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, browser, ingestJobId) + File.separator + loginDataFile.getName() + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(loginDataFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -696,7 +707,7 @@ class Chromium extends Extract { break; } List> tempList = this.dbConnect(temps, LOGIN_QUERY); - logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); @@ -727,9 +738,10 @@ class Chromium extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), browser)); - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create service account artifact for file (%d)", 
loginDataFile.getId()), ex); } } @@ -744,8 +756,11 @@ class Chromium extends Extract { /** * Gets and parses Autofill data from 'Web Data' database, and creates * TSK_WEB_FORM_AUTOFILL, TSK_WEB_FORM_ADDRESS artifacts + * @param browser + * @param browserLocation + * @param ingestJobId The ingest job id. */ - private void getAutofill(String browser, String browserLocation) { + private void getAutofill(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List webDataFiles; @@ -772,11 +787,12 @@ class Chromium extends Extract { Collection bbartifacts = new ArrayList<>(); int j = 0; while (j < webDataFiles.size()) { + databaseEncrypted = false; AbstractFile webDataFile = webDataFiles.get(j++); if ((webDataFile.getSize() == 0) || (webDataFile.getName().toLowerCase().contains("-slack"))) { continue; } - String tempFilePath = RAImageIngestModule.getRATempPath(currentCase, browser) + File.separator + webDataFile.getName() + j + ".db"; //NON-NLS + String tempFilePath = RAImageIngestModule.getRATempPath(currentCase, browser, ingestJobId) + File.separator + webDataFile.getName() + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(webDataFile, new File(tempFilePath), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -806,11 +822,18 @@ class Chromium extends Extract { try { // get form address atifacts getFormAddressArtifacts(webDataFile, tempFilePath, isSchemaV8X); + if (databaseEncrypted) { + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, + RecentActivityExtracterModuleFactory.getModuleName(), + String.format("%s Autofill Database Encryption Detected", browser))); + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED, webDataFile, bbattributes)); + } } catch (NoCurrentCaseException | TskCoreException | Blackboard.BlackboardException ex) { 
logger.log(Level.SEVERE, String.format("Error adding artifacts to the case database " + "for chrome file %s [objId=%d]", webDataFile.getName(), webDataFile.getId()), ex); } - + dbFile.delete(); } @@ -838,7 +861,7 @@ class Chromium extends Extract { : AUTOFILL_QUERY; List> autofills = this.dbConnect(dbFilePath, autoFillquery); - logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2}artifacts identified.", new Object[]{moduleName, dbFilePath, autofills.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, autofills.size()}); //NON-NLS for (HashMap result : autofills) { Collection bbattributes = new ArrayList<>(); @@ -847,9 +870,10 @@ class Chromium extends Extract { NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); //NON-NLS + fieldEncrypted = false; bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, RecentActivityExtracterModuleFactory.getModuleName(), - ((result.get("value").toString() != null) ? 
result.get("value").toString() : ""))); //NON-NLS + processFields(result.get("value")))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNT, RecentActivityExtracterModuleFactory.getModuleName(), @@ -868,11 +892,16 @@ class Chromium extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), browser)); - + if (fieldEncrypted) { + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, + RecentActivityExtracterModuleFactory.getModuleName(), ENCRYPTED_FIELD_MESSAGE)); + } + // Add an artifact - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create web form autopfill artifact for file (%d)", webDataFile.getId()), ex); } } @@ -906,23 +935,24 @@ class Chromium extends Extract { // Get Web form addresses List> addresses = this.dbConnect(dbFilePath, webformAddressQuery); - logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2}artifacts identified.", new Object[]{moduleName, dbFilePath, addresses.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, addresses.size()}); //NON-NLS for (HashMap result : addresses) { - // get name fields - String first_name = result.get("first_name").toString() != null ? result.get("first_name").toString() : ""; - String middle_name = result.get("middle_name").toString() != null ? result.get("middle_name").toString() : ""; - String last_name = result.get("last_name").toString() != null ? 
result.get("last_name").toString() : ""; + fieldEncrypted = false; + + String first_name = processFields(result.get("first_name")); + String middle_name = processFields(result.get("middle_name")); + String last_name = processFields(result.get("last_name")); // get email and phone - String email_Addr = result.get("email").toString() != null ? result.get("email").toString() : ""; - String phone_number = result.get("number").toString() != null ? result.get("number").toString() : ""; + String email_Addr = processFields(result.get("email")); + String phone_number = processFields(result.get("number")); // Get the address fields - String city = result.get("city").toString() != null ? result.get("city").toString() : ""; - String state = result.get("state").toString() != null ? result.get("state").toString() : ""; - String zipcode = result.get("zipcode").toString() != null ? result.get("zipcode").toString() : ""; - String country_code = result.get("country_code").toString() != null ? result.get("country_code").toString() : ""; + String city = processFields(result.get("city")); + String state = processFields(result.get("state")); + String zipcode = processFields(result.get("zipcode")); + String country_code = processFields(result.get("country_code")); // schema version specific fields String full_name = ""; @@ -932,14 +962,15 @@ class Chromium extends Extract { long use_date = 0; if (isSchemaV8X) { - full_name = result.get("full_name").toString() != null ? result.get("full_name").toString() : ""; - street_address = result.get("street_address").toString() != null ? result.get("street_address").toString() : ""; + + full_name = processFields(result.get("full_name")); + street_address = processFields(result.get("street_address")); date_modified = result.get("date_modified").toString() != null ? Long.valueOf(result.get("date_modified").toString()) : 0; use_count = result.get("use_count").toString() != null ? 
Integer.valueOf(result.get("use_count").toString()) : 0; use_date = result.get("use_date").toString() != null ? Long.valueOf(result.get("use_date").toString()) : 0; } else { - String address_line_1 = result.get("address_line_1").toString() != null ? result.get("street_address").toString() : ""; - String address_line_2 = result.get("address_line_2").toString() != null ? result.get("address_line_2").toString() : ""; + String address_line_1 = processFields(result.get("address_line_1")); + String address_line_2 = processFields(result.get("address_line_2")); street_address = String.join(" ", address_line_1, address_line_2); } @@ -955,6 +986,11 @@ class Chromium extends Extract { otherAttributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED, RecentActivityExtracterModuleFactory.getModuleName(), date_modified)); //NON-NLS + if (fieldEncrypted) { + otherAttributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, + RecentActivityExtracterModuleFactory.getModuleName(), ENCRYPTED_FIELD_MESSAGE)); //NON-NLS + + } } helper.addWebFormAddress( @@ -964,6 +1000,23 @@ class Chromium extends Extract { } } + /** + * Check the type of the object and if it is bytes then it is encrypted and return the string and + * set flag that field and file are encrypted + * @param dataValue Object to be checked, the object is from a database result set + * @return the actual string or an empty string + */ + private String processFields(Object dataValue) { + + if (dataValue instanceof byte[]) { + fieldEncrypted = true; + databaseEncrypted = true; + } + + return dataValue.toString() != null ? 
dataValue.toString() : ""; + + } + private boolean isChromePreVersion30(String temps) { String query = "PRAGMA table_info(downloads)"; //NON-NLS List> columns = this.dbConnect(temps, query); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java index c31e429601..bd6b5836d5 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -148,10 +148,7 @@ class DataSourceUsageAnalyzer extends Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION, Bundle.DataSourceUsageAnalyzer_parentModuleName(), dataSourceUsageDescription)); //NON-NLS - BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes); - if (bba != null) { - postArtifact(bba); - } + postArtifact(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes)); } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DefaultDomainCategorizer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DefaultDomainCategorizer.java index 0d64661f6c..9519a37bb1 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DefaultDomainCategorizer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DefaultDomainCategorizer.java @@ -128,18 +128,33 @@ public class DefaultDomainCategorizer implements DomainCategorizer { private Map mapping = null; @Override - public void initialize() throws 
DomainCategorizerException { - if (this.mapping == null) { - try { - this.mapping = loadMapping(); - } catch (IOException ex) { - throw new DomainCategorizerException("Unable to load domain type csv for domain category analysis", ex); - } + public synchronized void initialize() throws DomainCategorizerException { + if (isInitialized()) { + return; + } + + try { + this.mapping = loadMapping(); + } catch (IOException ex) { + throw new DomainCategorizerException("Unable to load domain type csv for domain category analysis", ex); } } + /** + * Returns true if this categorizer is properly initialized. + * + * @return True if this categorizer is properly initialized. + */ + private synchronized boolean isInitialized() { + return this.mapping != null; + } + @Override - public DomainCategory getCategory(String domain, String host) throws DomainCategorizerException { + public synchronized DomainCategory getCategory(String domain, String host) throws DomainCategorizerException { + if (!isInitialized()) { + initialize(); + } + // use host; use domain as fallback if no host provided String hostToUse = StringUtils.isBlank(host) ? domain : host; @@ -162,7 +177,7 @@ public class DefaultDomainCategorizer implements DomainCategorizer { } @Override - public void close() throws Exception { + public synchronized void close() throws Exception { // clear out the mapping to release resources mapping = null; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DefaultPriorityDomainCategorizer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DefaultPriorityDomainCategorizer.java new file mode 100644 index 0000000000..da84660cc2 --- /dev/null +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DefaultPriorityDomainCategorizer.java @@ -0,0 +1,104 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.recentactivity; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang.StringUtils; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.url.analytics.DomainCategorizer; +import org.sleuthkit.autopsy.url.analytics.DomainCategorizerException; +import org.sleuthkit.autopsy.url.analytics.DomainCategory; + +/** + * The autopsy provided domain category provider that overrides all domain + * category providers except the custom web domain categorizations. 
+ */ +@Messages({ + "DefaultPriorityDomainCategorizer_searchEngineCategory=Search Engine" +}) +public class DefaultPriorityDomainCategorizer implements DomainCategorizer { + + // taken from https://www.google.com/supported_domains + private static final List GOOGLE_DOMAINS = Arrays.asList("google.com", "google.ad", "google.ae", "google.com.af", "google.com.ag", "google.com.ai", "google.al", "google.am", "google.co.ao", "google.com.ar", "google.as", "google.at", "google.com.au", "google.az", "google.ba", "google.com.bd", "google.be", "google.bf", "google.bg", "google.com.bh", "google.bi", "google.bj", "google.com.bn", "google.com.bo", "google.com.br", "google.bs", "google.bt", "google.co.bw", "google.by", "google.com.bz", "google.ca", "google.cd", "google.cf", "google.cg", "google.ch", "google.ci", "google.co.ck", "google.cl", "google.cm", "google.cn", "google.com.co", "google.co.cr", "google.com.cu", "google.cv", "google.com.cy", "google.cz", "google.de", "google.dj", "google.dk", "google.dm", "google.com.do", "google.dz", "google.com.ec", "google.ee", "google.com.eg", "google.es", "google.com.et", "google.fi", "google.com.fj", "google.fm", "google.fr", "google.ga", "google.ge", "google.gg", "google.com.gh", "google.com.gi", "google.gl", "google.gm", "google.gr", "google.com.gt", "google.gy", "google.com.hk", "google.hn", "google.hr", "google.ht", "google.hu", "google.co.id", "google.ie", "google.co.il", "google.im", "google.co.in", "google.iq", "google.is", "google.it", "google.je", "google.com.jm", "google.jo", "google.co.jp", "google.co.ke", "google.com.kh", "google.ki", "google.kg", "google.co.kr", "google.com.kw", "google.kz", "google.la", "google.com.lb", "google.li", "google.lk", "google.co.ls", "google.lt", "google.lu", "google.lv", "google.com.ly", "google.co.ma", "google.md", "google.me", "google.mg", "google.mk", "google.ml", "google.com.mm", "google.mn", "google.ms", "google.com.mt", "google.mu", "google.mv", "google.mw", "google.com.mx", 
"google.com.my", "google.co.mz", "google.com.na", "google.com.ng", "google.com.ni", "google.ne", "google.nl", "google.no", "google.com.np", "google.nr", "google.nu", "google.co.nz", "google.com.om", "google.com.pa", "google.com.pe", "google.com.pg", "google.com.ph", "google.com.pk", "google.pl", "google.pn", "google.com.pr", "google.ps", "google.pt", "google.com.py", "google.com.qa", "google.ro", "google.ru", "google.rw", "google.com.sa", "google.com.sb", "google.sc", "google.se", "google.com.sg", "google.sh", "google.si", "google.sk", "google.com.sl", "google.sn", "google.so", "google.sm", "google.sr", "google.st", "google.com.sv", "google.td", "google.tg", "google.co.th", "google.com.tj", "google.tl", "google.tm", "google.tn", "google.to", "google.com.tr", "google.tt", "google.com.tw", "google.co.tz", "google.com.ua", "google.co.ug", "google.co.uk", "google.com.uy", "google.co.uz", "google.com.vc", "google.co.ve", "google.vg", "google.co.vi", "google.com.vn", "google.vu", "google.ws", "google.rs", "google.co.za", "google.co.zm", "google.co.zw", "google.cat"); + + // taken from https://www.yahoo.com/everything/world + private static final List YAHOO_DOMAINS = Arrays.asList("espanol.yahoo.com", "au.yahoo.com", "be.yahoo.com", "fr-be.yahoo.com", "br.yahoo.com", "ca.yahoo.com", "espanol.yahoo.com", "espanol.yahoo.com", "de.yahoo.com", "es.yahoo.com", "espanol.yahoo.com", "fr.yahoo.com", "in.yahoo.com", "id.yahoo.com", "ie.yahoo.com", "it.yahoo.com", "en-maktoob.yahoo.com", "malaysia.yahoo.com", "espanol.yahoo.com", "nz.yahoo.com", "espanol.yahoo.com", "ph.yahoo.com", "qc.yahoo.com", "ro.yahoo.com", "sg.yahoo.com", "za.yahoo.com", "se.yahoo.com", "uk.yahoo.com", "yahoo.com", "espanol.yahoo.com", "vn.yahoo.com", "gr.yahoo.com", "maktoob.yahoo.com", "yahoo.com", "hk.yahoo.com", "tw.yahoo.com", "yahoo.co.jp"); + + private static final List OTHER_SEARCH_ENGINES = Arrays.asList( + "bing.com", + "baidu.com", + "sogou.com", + "soso.com", + "duckduckgo.com", + 
"swisscows.com", + "gibiru.com", + "cutestat.com", + "youdao.com", + "biglobe.ne.jp", + "givewater.com", + "ekoru.org", + "ecosia.org", + // according to https://en.wikipedia.org/wiki/Yandex + "yandex.ru", + "yandex.com" + ); + + private static final String WWW_PREFIX = "www"; + + private static final Map DOMAIN_LOOKUP + = Stream.of(GOOGLE_DOMAINS, YAHOO_DOMAINS, OTHER_SEARCH_ENGINES) + .flatMap((lst) -> lst.stream()) + .collect(Collectors.toMap((k) -> k, (k) -> Bundle.DefaultPriorityDomainCategorizer_searchEngineCategory(), (v1, v2) -> v1)); + + @Override + public void initialize() throws DomainCategorizerException { + } + + @Override + public DomainCategory getCategory(String domain, String host) throws DomainCategorizerException { + + String hostToUse = StringUtils.isBlank(host) ? domain : host; + + if (StringUtils.isBlank(hostToUse)) { + return null; + } + + List domainWords = Stream.of(hostToUse.toLowerCase().split("\\.")) + .filter(StringUtils::isNotBlank) + .map(String::trim) + .collect(Collectors.toList()); + + String sanitizedDomain = domainWords.stream() + // skip first word segment if 'www' + .skip(domainWords.size() > 0 && WWW_PREFIX.equals(domainWords.get(0)) ? 1 : 0) + .collect(Collectors.joining(".")); + + String category = DOMAIN_LOOKUP.get(sanitizedDomain); + return category == null ? null : new DomainCategory(sanitizedDomain, category); + } + + @Override + public void close() throws IOException { + } +} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java index cb06a525c9..d24a031a48 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.recentactivity; import java.net.MalformedURLException; import java.net.URL; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -32,6 +33,7 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.lang.StringUtils; import org.openide.util.Lookup; import org.openide.util.NbBundle.Messages; @@ -83,6 +85,20 @@ class DomainCategoryRunner extends Extract { private static final Logger logger = Logger.getLogger(DomainCategoryRunner.class.getName()); + // NOTE: if CustomWebCategorizer ever changes name, this will need to be changed as well. + private static final String CUSTOM_CATEGORIZER_PATH = "org.sleuthkit.autopsy.url.analytics.domaincategorization.CustomWebCategorizer"; + + // the artifact types to be searched for domain categories + private static final List DOMAIN_CATEGORIZATION_TYPES = Stream.of( + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK, + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE, + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE, + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY) + .map(BlackboardArtifact.Type::new) + .collect(Collectors.toList()); + /** * Get seconds from epoch from the mapping for the attribute type id. * @@ -165,7 +181,7 @@ class DomainCategoryRunner extends Extract { * Main constructor. 
*/ DomainCategoryRunner() { - moduleName = null; + } /** @@ -352,7 +368,7 @@ class DomainCategoryRunner extends Extract { Set hostSuffixesSeen = new HashSet<>(); try { List listArtifacts = currentCase.getSleuthkitCase().getBlackboard().getArtifacts( - Arrays.asList(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_HISTORY)), + DOMAIN_CATEGORIZATION_TYPES, Arrays.asList(dataSource.getId())); logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS @@ -361,7 +377,8 @@ class DomainCategoryRunner extends Extract { for (BlackboardArtifact artifact : listArtifacts) { // make sure we haven't cancelled if (context.dataSourceIngestIsCancelled()) { - break; //User cancelled the process. + //User cancelled the process. + break; } // get the pertinent details for this artifact. @@ -415,7 +432,7 @@ class DomainCategoryRunner extends Extract { * parent file). * @param domainCategory The category for this host/domain. */ - private void addCategoryArtifact(ArtifactHost artHost, String domainCategory) { + private void addCategoryArtifact(ArtifactHost artHost, String domainCategory) throws TskCoreException { String moduleName = Bundle.DomainCategoryRunner_parentModuleName(); Collection bbattributes = Arrays.asList( new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, moduleName, artHost.getDomain()), @@ -437,24 +454,45 @@ class DomainCategoryRunner extends Extract { @Override void configExtractor() throws IngestModule.IngestModuleException { // lookup all providers, filter null providers, and sort providers - Collection lookupList = Lookup.getDefault().lookupAll(DomainCategorizer.class); - if (lookupList == null) { - lookupList = Collections.emptyList(); - } - - List foundProviders = lookupList.stream() - .filter(provider -> provider != null) + Collection lookupCollection = Lookup.getDefault().lookupAll(DomainCategorizer.class); + Collection lookupList = (lookupCollection == null) ? 
+ Collections.emptyList() : + lookupCollection; + + // this will be the class instance of the foundProviders + List foundProviders = new ArrayList<>(); + + // find the custom domain categories provider if present and add it first to the list + lookupList.stream() + .filter(categorizer -> categorizer.getClass().getName().contains(CUSTOM_CATEGORIZER_PATH)) + .findFirst() + .ifPresent((provider) -> foundProviders.add(provider)); + + // add the default priority categorizer + foundProviders.add(new DefaultPriorityDomainCategorizer()); + + // add all others except for the custom web domain categorizer, the default priority + // categorizer and the default categorizer + lookupList.stream() + .filter(categorizer -> categorizer != null) + .filter(categorizer -> { + String className = categorizer.getClass().getName(); + return !className.contains(CUSTOM_CATEGORIZER_PATH) && + !className.equals(DefaultPriorityDomainCategorizer.class.getName()) && + !className.equals(DefaultDomainCategorizer.class.getName()); + }) .sorted((a, b) -> a.getClass().getName().compareToIgnoreCase(b.getClass().getName())) - .collect(Collectors.toList()); - - // add the default categorizer last as a last resort + .forEach(foundProviders::add); + + // add the default categorizer last foundProviders.add(new DefaultDomainCategorizer()); - + for (DomainCategorizer provider : foundProviders) { try { provider.initialize(); } catch (DomainCategorizerException ex) { - throw new IngestModule.IngestModuleException("There was an error instantiating the provider: " + provider.getClass().getSimpleName(), ex); + throw new IngestModule.IngestModuleException("There was an error instantiating the provider: " + + provider.getClass().getSimpleName(), ex); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index fa23d8564f..53acbc0865 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java 
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -34,6 +34,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Optional; import java.util.logging.Level; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; @@ -47,11 +48,12 @@ import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskException; abstract class Extract { @@ -61,10 +63,16 @@ abstract class Extract { protected Blackboard blackboard; private final Logger logger = Logger.getLogger(this.getClass().getName()); private final ArrayList errorMessages = new ArrayList<>(); - String moduleName = ""; + private String moduleName = ""; boolean dataFound = false; + private RAOsAccountCache osAccountCache = null; - Extract() { + Extract() { + this(""); + } + + Extract(String moduleName) { + this.moduleName = moduleName; } final void init() throws IngestModuleException { @@ -86,6 +94,21 @@ abstract class Extract { void configExtractor() throws IngestModuleException { } + /** + * Extractor process method intended to mirror the Ingest process method. + * + * Subclasses should overload just the abstract version of the method. + * + * @param dataSource The data source object to ingest. + * @param context The the context for the current job. + * @param progressBar A handle to the progressBar for the module to update with status. + * @param osAccountCache The OsAccountCache. 
+ */ + void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar, RAOsAccountCache osAccountCache) { + this.osAccountCache = osAccountCache; + process(dataSource, context, progressBar); + } + abstract void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar); void complete() { @@ -108,28 +131,58 @@ abstract class Extract { protected void addErrorMessage(String message) { errorMessages.add(message); } + + /** + * Generic method for creating artifacts. + * + * @param type The type of artifact. + * @param file The file the artifact originated from. + * @param attributes A list of the attributes to associate with the + * artifact. + * + * @return The newly created artifact. + */ + BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE type, Content content, Collection attributes) throws TskCoreException { + return createArtifactWithAttributes(new BlackboardArtifact.Type(type), content, attributes); + } + + /** + * Generic method for creating artifacts. + * + * @param type The type of artifact. + * @param content The file the artifact originated from. + * @param attributes A list of the attributes to associate with the + * artifact. + * + * @return The newly created artifact. + * + * @throws TskCoreException + */ + BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.Type type, Content content, Collection attributes) throws TskCoreException { + if (type.getCategory() == BlackboardArtifact.Category.DATA_ARTIFACT) { + return (content instanceof AbstractFile) + ? ((AbstractFile) content).newDataArtifact(type, attributes) + : content.newDataArtifact(type, attributes, null); + } else { + BlackboardArtifact bbart = content.newArtifact(type.getTypeID()); + bbart.addAttributes(attributes); + return bbart; + } + } /** - * Generic method for creating a blackboard artifact with attributes + * Returns and associated artifact for the given artifact. 
* - * @param type is a blackboard.artifact_type enum to determine which - * type the artifact should be - * @param content is the Content object that needs to have the - * artifact added for it - * @param bbattributes is the collection of blackboard attributes that need - * to be added to the artifact after the artifact has - * been created - * @return The newly-created artifact, or null on error + * @param content The content to create the artifact from. + * @param artifact The artifact to associate the new artifact with. + * + * @return The newly created artifact. + * + * @throws TskCoreException */ - protected BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE type, Content content, Collection bbattributes) { - try { - BlackboardArtifact bbart = content.newArtifact(type); - bbart.addAttributes(bbattributes); - return bbart; - } catch (TskException ex) { - logger.log(Level.WARNING, "Error while trying to add an artifact", ex); //NON-NLS - } - return null; + BlackboardArtifact createAssociatedArtifact(Content content, BlackboardArtifact artifact) throws TskCoreException { + return createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, content, Collections.singletonList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, + RecentActivityExtracterModuleFactory.getModuleName(), artifact.getArtifactID()))); } /** @@ -468,12 +521,13 @@ abstract class Extract { * * @param context * @param file + * @param IngestJobId The ingest job id. 
* @return Newly created copy of the AbstractFile * @throws IOException */ - protected File createTemporaryFile(IngestJobContext context, AbstractFile file) throws IOException{ + protected File createTemporaryFile(IngestJobContext context, AbstractFile file, long ingestJobId) throws IOException{ Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath( - getCurrentCase(), getName()), file.getName() + file.getId() + file.getNameExtension()); + getCurrentCase(), getName(), ingestJobId), file.getName() + file.getId() + file.getNameExtension()); java.io.File tempFile = tempFilePath.toFile(); try { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 6db3d75c02..68f280a5fa 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -24,7 +24,6 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.Date; import java.text.ParseException; import java.text.SimpleDateFormat; import java.text.DateFormat; @@ -53,6 +52,9 @@ import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; @@ -62,7 +64,6 @@ import org.sleuthkit.datamodel.TskCoreException; final class ExtractEdge extends Extract { private static final Logger LOG = 
Logger.getLogger(ExtractEdge.class.getName()); - private final Path moduleTempResultPath; private Content dataSource; private IngestJobContext context; private HashMap> containersTable; @@ -123,8 +124,8 @@ final class ExtractEdge extends Extract { /** * Extract the bookmarks, cookies, downloads and history from Microsoft Edge */ - ExtractEdge() throws NoCurrentCaseException { - moduleTempResultPath = Paths.get(RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE), EDGE_RESULT_FOLDER_NAME); + ExtractEdge() { + super(Bundle.ExtractEdge_Module_Name()); } @Override @@ -134,6 +135,9 @@ final class ExtractEdge extends Extract { @Override void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), EDGE, context.getJobId()); + String moduleTempResultDir = Paths.get(moduleTempDir, EDGE_RESULT_FOLDER_NAME).toString(); + this.dataSource = dataSource; this.context = context; this.setFoundData(false); @@ -183,7 +187,7 @@ final class ExtractEdge extends Extract { } try { - this.processWebCacheDbFile(esedumper, webCacheFiles, progressBar); + this.processWebCacheDbFile(esedumper, webCacheFiles, progressBar, moduleTempDir, moduleTempResultDir); } catch (IOException | TskCoreException ex) { LOG.log(Level.SEVERE, "Error processing 'WebCacheV01.dat' files for Microsoft Edge", ex); // NON-NLS this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); @@ -191,7 +195,7 @@ final class ExtractEdge extends Extract { progressBar.progress(Bundle.Progress_Message_Edge_Bookmarks()); try { - this.processSpartanDbFile(esedumper, spartanFiles); + this.processSpartanDbFile(esedumper, spartanFiles, moduleTempDir, moduleTempResultDir); } catch (IOException | TskCoreException ex) { LOG.log(Level.SEVERE, "Error processing 'spartan.edb' files for Microsoft Edge", ex); // NON-NLS this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_spartanFail()); @@ 
-204,10 +208,13 @@ final class ExtractEdge extends Extract { * * @param eseDumperPath Path to ESEDatabaseView.exe * @param webCacheFiles List of case WebCacheV01.dat files + * @param moduleTempDir The temp directory for this module. + * @param moduleTempResultDir The temp results directory for this module. * @throws IOException * @throws TskCoreException */ - void processWebCacheDbFile(String eseDumperPath, List webCacheFiles, DataSourceIngestModuleProgress progressBar) throws IOException, TskCoreException { + void processWebCacheDbFile(String eseDumperPath, List webCacheFiles, DataSourceIngestModuleProgress progressBar, + String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException { for (AbstractFile webCacheFile : webCacheFiles) { @@ -220,7 +227,7 @@ final class ExtractEdge extends Extract { //Run the dumper String tempWebCacheFileName = EDGE_WEBCACHE_PREFIX + Integer.toString((int) webCacheFile.getId()) + EDGE_WEBCACHE_EXT; //NON-NLS - File tempWebCacheFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE), tempWebCacheFileName); + File tempWebCacheFile = new File(moduleTempDir, tempWebCacheFileName); try { ContentUtils.writeToFile(webCacheFile, tempWebCacheFile, @@ -229,7 +236,7 @@ final class ExtractEdge extends Extract { throw new IOException("Error writingToFile: " + webCacheFile, ex); //NON-NLS } - File resultsDir = new File(moduleTempResultPath.toAbsolutePath() + Integer.toString((int) webCacheFile.getId())); + File resultsDir = new File(moduleTempDir, Integer.toString((int) webCacheFile.getId())); resultsDir.mkdirs(); try { executeDumper(eseDumperPath, tempWebCacheFile.getAbsolutePath(), @@ -264,10 +271,13 @@ final class ExtractEdge extends Extract { * * @param eseDumperPath Path to ESEDatabaseViewer * @param spartanFiles List of the case spartan.edb files + * @param moduleTempDir The temp directory for this module. + * @param moduleTempResultDir The temp results directory for this module. 
* @throws IOException * @throws TskCoreException */ - void processSpartanDbFile(String eseDumperPath, List spartanFiles) throws IOException, TskCoreException { + void processSpartanDbFile(String eseDumperPath, List spartanFiles, + String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException { for (AbstractFile spartanFile : spartanFiles) { @@ -278,7 +288,7 @@ final class ExtractEdge extends Extract { //Run the dumper String tempSpartanFileName = EDGE_WEBCACHE_PREFIX + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT; - File tempSpartanFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE), tempSpartanFileName); + File tempSpartanFile = new File(moduleTempDir, tempSpartanFileName); try { ContentUtils.writeToFile(spartanFile, tempSpartanFile, @@ -287,7 +297,7 @@ final class ExtractEdge extends Extract { throw new IOException("Error writingToFile: " + spartanFile, ex); //NON-NLS } - File resultsDir = new File(moduleTempResultPath.toAbsolutePath() + Integer.toString((int) spartanFile.getId())); + File resultsDir = new File(moduleTempResultDir, Integer.toString((int) spartanFile.getId())); resultsDir.mkdirs(); try { executeDumper(eseDumperPath, tempSpartanFile.getAbsolutePath(), @@ -628,14 +638,10 @@ final class ExtractEdge extends Extract { String accessTime = rowSplit[index].trim(); Long ftime = parseTimestamp(accessTime); - BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); - - bbart.addAttributes(createHistoryAttribute(url, ftime, + return createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, createHistoryAttribute(url, ftime, null, null, this.getName(), NetworkUtils.extractDomain(url), user)); - - return bbart; } /** @@ -658,9 +664,7 @@ final class ExtractEdge extends Extract { String value = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_VALUE)].trim()); String url = flipDomain(domain); - BlackboardArtifact bbart = 
origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE); - bbart.addAttributes(createCookieAttributes(url, null, ftime, null, name, value, this.getName(), NetworkUtils.extractDomain(url))); - return bbart; + return createArtifactWithAttributes(TSK_WEB_COOKIE, origFile, createCookieAttributes(url, null, ftime, null, name, value, this.getName(), NetworkUtils.extractDomain(url))); } /** @@ -707,11 +711,9 @@ final class ExtractEdge extends Extract { if (url.isEmpty()) { return null; } - - BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK); - bbart.addAttributes(createBookmarkAttributes(url, title, null, + + return createArtifactWithAttributes(TSK_WEB_BOOKMARK, origFile, createBookmarkAttributes(url, title, null, this.getName(), NetworkUtils.extractDomain(url))); - return bbart; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 93f901d1a2..29754fcdcb 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2012-2020 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * * Copyright 2012 42six Solutions. 
* Contact: aebadirad 42six com @@ -31,6 +31,7 @@ import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; +import java.nio.file.Paths; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -43,7 +44,6 @@ import java.util.stream.Collectors; import org.openide.modules.InstalledFileLocator; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -56,6 +56,7 @@ import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; @@ -66,7 +67,6 @@ import org.sleuthkit.datamodel.TskCoreException; class ExtractIE extends Extract { private static final Logger logger = Logger.getLogger(ExtractIE.class.getName()); - private final String moduleTempResultsDir; private String PASCO_LIB_PATH; private final String JAVA_PATH; private static final String RESOURCE_URL_PREFIX = "res://"; @@ -83,14 +83,16 @@ class ExtractIE extends Extract { "Progress_Message_IE_AutoFill=IE Auto Fill", "Progress_Message_IE_Logins=IE Logins",}) - ExtractIE() throws NoCurrentCaseException { - moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text"); - moduleTempResultsDir = RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), "IE") + File.separator + "results"; //NON-NLS + ExtractIE() { + super(NbBundle.getMessage(ExtractIE.class, 
"ExtractIE.moduleName.text")); JAVA_PATH = PlatformUtil.getJavaPath(); } @Override public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), "IE", context.getJobId()); + String moduleTempResultsDir = Paths.get(moduleTempDir, "results").toString(); + this.dataSource = dataSource; this.context = context; dataFound = false; @@ -110,7 +112,7 @@ class ExtractIE extends Extract { } progressBar.progress(Bundle.Progress_Message_IE_History()); - this.getHistory(); + this.getHistory(moduleTempDir, moduleTempResultsDir); } /** @@ -168,9 +170,10 @@ class ExtractIE extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), domain)); } - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getDisplayName(), fav.getId() ), ex); } } @@ -280,9 +283,11 @@ class ExtractIE extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), domain)); } - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_COOKIE.getDisplayName(), cookiesFile.getId() ), ex); } } @@ -293,8 +298,10 @@ class ExtractIE extends Extract { /** * Locates index.dat files, runs Pasco on them, and creates 
artifacts. + * @param moduleTempDir The path to the module temp directory. + * @param moduleTempResultsDir The path to the module temp results directory. */ - private void getHistory() { + private void getHistory(String moduleTempDir, String moduleTempResultsDir) { logger.log(Level.INFO, "Pasco results path: {0}", moduleTempResultsDir); //NON-NLS boolean foundHistory = false; @@ -346,7 +353,7 @@ class ExtractIE extends Extract { //BlackboardArtifact bbart = fsc.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); indexFileName = "index" + Integer.toString((int) indexFile.getId()) + ".dat"; //NON-NLS //indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat"; - temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName; //NON-NLS + temps = moduleTempDir + File.separator + indexFileName; //NON-NLS File datFile = new File(temps); if (context.dataSourceIngestIsCancelled()) { break; @@ -362,7 +369,7 @@ class ExtractIE extends Extract { } String filename = "pasco2Result." + indexFile.getId() + ".txt"; //NON-NLS - boolean bPascProcSuccess = executePasco(temps, filename); + boolean bPascProcSuccess = executePasco(temps, filename, moduleTempResultsDir); if (context.dataSourceIngestIsCancelled()) { return; } @@ -371,7 +378,7 @@ class ExtractIE extends Extract { //Now fetch the results, parse them and the delete the files. 
if (bPascProcSuccess) { // Don't add TSK_OS_ACCOUNT artifacts to the ModuleDataEvent - bbartifacts.addAll(parsePascoOutput(indexFile, filename).stream() + bbartifacts.addAll(parsePascoOutput(indexFile, filename, moduleTempResultsDir).stream() .filter(bbart -> bbart.getArtifactTypeID() == ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID()) .collect(Collectors.toList())); if (context.dataSourceIngestIsCancelled()) { @@ -398,6 +405,7 @@ class ExtractIE extends Extract { * * @param indexFilePath Path to local index.dat file to analyze * @param outputFileName Name of file to save output to + * @param moduleTempResultsDir the path to the module temp directory. * * @return false on error */ @@ -405,7 +413,7 @@ class ExtractIE extends Extract { "# {0} - sub module name", "ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history", }) - private boolean executePasco(String indexFilePath, String outputFileName) { + private boolean executePasco(String indexFilePath, String outputFileName, String moduleTempResultsDir) { boolean success = true; try { final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName; @@ -447,10 +455,11 @@ class ExtractIE extends Extract { * @param origFile Original index.dat file that was analyzed to * get this output * @param pascoOutputFileName name of pasco output file + * @param moduleTempResultsDir the path to the module temp directory. 
* * @return A collection of created artifacts */ - private Collection parsePascoOutput(AbstractFile origFile, String pascoOutputFileName) { + private Collection parsePascoOutput(AbstractFile origFile, String pascoOutputFileName, String moduleTempResultsDir) { Collection bbartifacts = new ArrayList<>(); String fnAbs = moduleTempResultsDir + File.separator + pascoOutputFileName; @@ -558,33 +567,31 @@ class ExtractIE extends Extract { } } + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), realurl)); + //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl))); + + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, + RecentActivityExtracterModuleFactory.getModuleName(), ftime)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER, + RecentActivityExtracterModuleFactory.getModuleName(), "")); + // @@@ NOte that other browser modules are adding TITLE in here for the title + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + NbBundle.getMessage(this.getClass(), + "ExtractIE.moduleName.text"))); + if (domain != null && domain.isEmpty() == false) { + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), domain)); + } + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), user)); + try { - BlackboardArtifact bbart = origFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); - Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), realurl)); - //bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl))); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - RecentActivityExtracterModuleFactory.getModuleName(), ftime)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER, - RecentActivityExtracterModuleFactory.getModuleName(), "")); - // @@@ NOte that other browser modules are adding TITLE in hre for the title - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), - "ExtractIE.moduleName.text"))); - if (domain != null && domain.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), domain)); - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), user)); - bbart.addAttributes(bbattributes); - - bbartifacts.add(bbart); + bbartifacts.add(createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, bbattributes)); } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error writing Internet Explorer web history artifact to the blackboard. 
Pasco results will be incomplete", ex); //NON-NLS + logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_HISTORY.getDisplayName(), origFile.getId() ), ex); } } fileScanner.close(); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java index bf6e9f5f4d..a76c1652dd 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java @@ -64,7 +64,7 @@ class ExtractOs extends Extract { private static final String LINUX_UBUNTU_PATH = "/etc/lsb-release"; private Content dataSource; - + @Override void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java index 0c140fed4f..3b0239fcc5 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -33,11 +33,16 @@ import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.apache.commons.io.FilenameUtils; import org.openide.modules.InstalledFileLocator; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.ExecUtil; +import static org.sleuthkit.autopsy.coreutils.FileUtil.escapeFileName; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect; @@ -46,11 +51,8 @@ import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; @@ -65,8 +67,6 @@ final class ExtractPrefetch extends Extract { private IngestJobContext context; - private static final String MODULE_NAME = "extractPREFETCH"; //NON-NLS - private static final String PREFETCH_TSK_COMMENT = "Prefetch File"; private static final String PREFETCH_FILE_LOCATION = "/windows/prefetch"; private static final String PREFETCH_TOOL_FOLDER = "markmckinnon"; //NON-NLS @@ -85,13 +85,14 @@ final class ExtractPrefetch extends Extract { 
"ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files" }) ExtractPrefetch() { - this.moduleName = Bundle.ExtractPrefetch_module_name(); + super(Bundle.ExtractPrefetch_module_name()); } @Override void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.context = context; + long ingestJobId = context.getJobId(); String modOutPath = Case.getCurrentCase().getModuleDirectory() + File.separator + PREFETCH_DIR_NAME; File dir = new File(modOutPath); @@ -103,7 +104,7 @@ final class ExtractPrefetch extends Extract { } } - extractPrefetchFiles(dataSource); + extractPrefetchFiles(dataSource, ingestJobId); final String prefetchDumper = getPathForPrefetchDumper(); if (prefetchDumper == null) { @@ -117,9 +118,12 @@ final class ExtractPrefetch extends Extract { String modOutFile = modOutPath + File.separator + dataSource.getName() + "-" + PREFETCH_PARSER_DB_FILE; try { - String tempDirPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), dataSource.getName() + "-" + PREFETCH_DIR_NAME); + String tempDirPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), dataSource.getName() + "-" + PREFETCH_DIR_NAME, ingestJobId); parsePrefetchFiles(prefetchDumper, tempDirPath, modOutFile, modOutPath); - createAppExecArtifacts(modOutFile, dataSource); + File prefetchDatabase = new File(modOutFile); + if (prefetchDatabase.exists()) { + createAppExecArtifacts(modOutFile, dataSource); + } } catch (IOException ex) { logger.log(Level.SEVERE, "Error parsing prefetch files", ex); //NON-NLS addErrorMessage(Bundle.ExtractPrefetch_errMsg_prefetchParsingFailed(Bundle.ExtractPrefetch_module_name())); @@ -132,7 +136,7 @@ final class ExtractPrefetch extends Extract { * * @param dataSource - datasource to search for prefetch files */ - void extractPrefetchFiles(Content dataSource) { + void extractPrefetchFiles(Content dataSource, long ingestJobId) { List pFiles; FileManager fileManager = 
Case.getCurrentCase().getServices().getFileManager(); @@ -150,8 +154,13 @@ final class ExtractPrefetch extends Extract { return; } - String prefetchFile = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), dataSource.getName() + "-" + PREFETCH_DIR_NAME) + File.separator + pFile.getName(); - if (pFile.getParentPath().toLowerCase().contains(PREFETCH_FILE_LOCATION.toLowerCase())) { + if (pFile.getParentPath().toLowerCase().contains(PREFETCH_FILE_LOCATION.toLowerCase()) && pFile.getSize() > 0) { + String origFileName = pFile.getName(); + String ext = FilenameUtils.getExtension(origFileName); + String baseName = FilenameUtils.getBaseName(origFileName); + String fileName = escapeFileName(String.format("%s_%d.%s", baseName, pFile.getId(), ext)); + String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), dataSource.getName() + "-" + PREFETCH_DIR_NAME, ingestJobId); + String prefetchFile = Paths.get(baseRaTempPath, fileName).toString(); try { ContentUtils.writeToFile(pFile, new File(prefetchFile)); } catch (IOException ex) { @@ -262,10 +271,32 @@ final class ExtractPrefetch extends Extract { String timesProgramRun = resultSet.getString("Number_time_file_run"); String filePath = resultSet.getString("file_path"); - AbstractFile pfAbstractFile = getAbstractFile(prefetchFileName, PREFETCH_FILE_LOCATION, dataSource); - Set prefetchExecutionTimes = findNonZeroExecutionTimes(executionTimes); + String baseName = FilenameUtils.getBaseName(prefetchFileName); + Matcher match = Pattern.compile("_(?\\d*)\\s*$").matcher(baseName); + if (!match.find()) { + logger.log(Level.WARNING, "Invalid format for PF file: " + prefetchFileName);//NON-NLS + continue; + } + + + /** + * A prefetch file is created when a program is run and the superfetch service collected data about the first 10 + * seconds of the run, the trace data is then written to a new prefetch file or merged with an existing prefetch file. 
+ * If the prefetch file gets deleted for some reason then a new one will be created. See 7500 in JIRA for more + * information. + */ + AbstractFile pfAbstractFile = null; + try { + Content c = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(Long.parseLong(match.group("objId"))); + if (c instanceof AbstractFile) { + pfAbstractFile = (AbstractFile) c; + } + } catch (NoCurrentCaseException | TskCoreException | NumberFormatException ex ) { + logger.log(Level.SEVERE, "Unable to find content for: " + prefetchFileName, ex); + } + if (pfAbstractFile != null) { for (Long executionTime : prefetchExecutionTimes) { @@ -285,8 +316,7 @@ final class ExtractPrefetch extends Extract { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), PREFETCH_TSK_COMMENT)); try { - BlackboardArtifact blkBrdArt = pfAbstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN); - blkBrdArt.addAttributes(blkBrdAttributes); + BlackboardArtifact blkBrdArt = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN, pfAbstractFile, blkBrdAttributes); blkBrdArtList.add(blkBrdArt); BlackboardArtifact associatedBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), filePath, blkBrdArt, dataSource); if (associatedBbArtifact != null) { @@ -302,33 +332,15 @@ final class ExtractPrefetch extends Extract { } } catch (SQLException ex) { - logger.log(Level.SEVERE, "Error while trying to read into a sqlite db.", ex);//NON-NLS + logger.log(Level.WARNING, String.format("Error while trying to read into a sqlite db %s.", prefetchDb));//NON-NLS + logger.log(Level.WARNING, ex.getMessage()); } if (!blkBrdArtList.isEmpty() && !context.dataSourceIngestIsCancelled()) { postArtifacts(blkBrdArtList); } } - - /** - * Cycle thru the execution times list and only return a new list of times - * that are greater than zero. 
- * - * @param executionTimes - list of prefetch execution times 8 possible - * timestamps - * - * @return List of timestamps that are greater than zero - */ - private Set findNonZeroExecutionTimes(List executionTimes) { - Set prefetchExecutionTimes = new HashSet<>(); - for (Long executionTime : executionTimes) { // only add prefetch file entries that have an actual date associated with them - if (executionTime > 0) { - prefetchExecutionTimes.add(executionTime); - } - } - return prefetchExecutionTimes; - } - + /** * Create associated artifacts using file path name and the artifact it * associates with @@ -340,20 +352,11 @@ final class ExtractPrefetch extends Extract { * * @returnv BlackboardArtifact or a null value */ - private BlackboardArtifact createAssociatedArtifact(String fileName, String filePathName, BlackboardArtifact bba, Content dataSource) { + private BlackboardArtifact createAssociatedArtifact(String fileName, String filePathName, BlackboardArtifact bba, Content dataSource) throws TskCoreException { AbstractFile sourceFile = getAbstractFile(fileName, filePathName, dataSource); if (sourceFile != null) { - Collection bbattributes2 = new ArrayList<>(); - bbattributes2.addAll(Arrays.asList( - new BlackboardAttribute(TSK_ASSOCIATED_ARTIFACT, this.getName(), - bba.getArtifactID()))); - - BlackboardArtifact associatedObjectBba = createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, sourceFile, bbattributes2); - if (associatedObjectBba != null) { - return associatedObjectBba; - } + return createAssociatedArtifact(sourceFile, bba); } - return null; } @@ -381,7 +384,6 @@ final class ExtractPrefetch extends Extract { } for (AbstractFile pFile : files) { - if (pFile.getParentPath().toLowerCase().endsWith(filePath.toLowerCase() + '/')) { return pFile; } @@ -389,6 +391,24 @@ final class ExtractPrefetch extends Extract { return null; - } + } + /** + * Cycle thru the execution times list and only return a new list of times + * that are greater than zero. 
+ * + * @param executionTimes - list of prefetch execution times 8 possible + * timestamps + * + * @return List of timestamps that are greater than zero + */ + private Set findNonZeroExecutionTimes(List executionTimes) { + Set prefetchExecutionTimes = new HashSet<>(); + for (Long executionTime : executionTimes) { // only add prefetch file entries that have an actual date associated with them + if (executionTime > 0) { + prefetchExecutionTimes.add(executionTime); + } + } + return prefetchExecutionTimes; + } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java index e5b443fc1b..5d727f5b7d 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java @@ -35,6 +35,7 @@ import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.logging.Level; import org.joda.time.Instant; import org.openide.util.NbBundle.Messages; @@ -47,14 +48,14 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_ACCOUNT; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_DELETED; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_ID; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME; import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.FsContent; +import 
org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -81,7 +82,7 @@ final class ExtractRecycleBin extends Extract { "ExtractRecycleBin_module_name=Recycle Bin" }) ExtractRecycleBin() { - this.moduleName = Bundle.ExtractRecycleBin_module_name(); + super(Bundle.ExtractRecycleBin_module_name()); } @Override @@ -135,7 +136,7 @@ final class ExtractRecycleBin extends Extract { return; // No need to continue } - String tempRARecycleBinPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "recyclebin"); //NON-NLS + String tempRARecycleBinPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "recyclebin", context.getJobId()); //NON-NLS // cycle through the $I files and process each. for (AbstractFile iFile : iFiles) { @@ -336,21 +337,11 @@ final class ExtractRecycleBin extends Extract { */ private Map makeUserNameMap(Content dataSource) throws TskCoreException { Map userNameMap = new HashMap<>(); - - List accounts = blackboard.getArtifacts(TSK_OS_ACCOUNT.getTypeID(), dataSource.getId()); - - for (BlackboardArtifact account : accounts) { - BlackboardAttribute nameAttribute = getAttributeForArtifact(account, TSK_USER_NAME); - BlackboardAttribute idAttribute = getAttributeForArtifact(account, TSK_USER_ID); - - String userName = nameAttribute != null ? nameAttribute.getDisplayString() : ""; - String userID = idAttribute != null ? idAttribute.getDisplayString() : ""; - - if (!userID.isEmpty()) { - userNameMap.put(userID, userName); - } + + for(OsAccount account: tskCase.getOsAccountManager().getOsAccounts(((DataSource)dataSource).getHost())) { + Optional userName = account.getLoginName(); + userNameMap.put(account.getName(), userName.isPresent() ? 
userName.get() : ""); } - return userNameMap; } @@ -446,11 +437,11 @@ final class ExtractRecycleBin extends Extract { * @throws TskCoreException */ private BlackboardArtifact createArtifact(AbstractFile rFile, BlackboardArtifact.Type type, String fileName, String userName, long dateTime) throws TskCoreException { - BlackboardArtifact bba = rFile.newArtifact(type.getTypeID()); - bba.addAttribute(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - bba.addAttribute(new BlackboardAttribute(TSK_DATETIME_DELETED, getName(), dateTime)); - bba.addAttribute(new BlackboardAttribute(TSK_USER_NAME, getName(), userName == null || userName.isEmpty() ? "" : userName)); - return bba; + List attributes = new ArrayList<>(); + attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); + attributes.add(new BlackboardAttribute(TSK_DATETIME_DELETED, getName(), dateTime)); + attributes.add(new BlackboardAttribute(TSK_USER_NAME, getName(), userName == null || userName.isEmpty() ? "" : userName)); + return createArtifactWithAttributes(type, rFile, attributes); } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 46504c1886..b0d7fe08c3 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2012-2020 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * * Copyright 2012 42six Solutions. 
* Contact: aebadirad 42six com @@ -57,7 +57,6 @@ import org.xml.sax.SAXException; import java.nio.file.Path; import java.util.AbstractMap; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Collection; import java.util.Date; @@ -71,8 +70,6 @@ import java.util.Optional; import static java.util.TimeZone.getTimeZone; import java.util.stream.Collectors; import org.openide.util.Lookup; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService; @@ -81,10 +78,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_ACCOUNT; import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED; @@ -93,16 +87,17 @@ import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DAT import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_ID; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH; -import static 
org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_ID; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HOME_DIR; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.HostManager; import org.sleuthkit.datamodel.OsAccount; -import org.sleuthkit.datamodel.OsAccountAttribute; +import org.sleuthkit.datamodel.OsAccount.OsAccountAttribute; +import org.sleuthkit.datamodel.OsAccountInstance; import org.sleuthkit.datamodel.OsAccountManager; +import org.sleuthkit.datamodel.OsAccountManager.NotUserSIDException; +import org.sleuthkit.datamodel.OsAccountManager.OsAccountUpdateResult; import org.sleuthkit.datamodel.OsAccountRealm; import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException; import org.sleuthkit.datamodel.Report; @@ -180,22 +175,25 @@ class ExtractRegistry extends Extract { private IngestJobContext context; private Map userNameMap; + private String compName = ""; + private String domainName = ""; + private static final String SHELLBAG_ARTIFACT_NAME = "RA_SHELL_BAG"; //NON-NLS private static final String SHELLBAG_ATTRIBUTE_LAST_WRITE = "RA_SHELL_BAG_LAST_WRITE"; //NON-NLS private static final String SHELLBAG_ATTRIBUTE_KEY = "RA_SHELL_BAG_KEY"; //NON-NLS private static final SimpleDateFormat REG_RIPPER_TIME_FORMAT = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy 'Z'", US); - BlackboardArtifact.Type shellBagArtifactType = null; - BlackboardAttribute.Type shellBagKeyAttributeType = null; - BlackboardAttribute.Type shellBagLastWriteAttributeType = null; + private BlackboardArtifact.Type shellBagArtifactType = null; + private BlackboardAttribute.Type shellBagKeyAttributeType = null; + private BlackboardAttribute.Type shellBagLastWriteAttributeType = null; static { 
REG_RIPPER_TIME_FORMAT.setTimeZone(getTimeZone("GMT")); } ExtractRegistry() throws IngestModuleException { - moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractRegistry.moduleName.text"); + super(NbBundle.getMessage(ExtractIE.class, "ExtractRegistry.moduleName.text")); final File rrRoot = InstalledFileLocator.getDefault().locate("rr", ExtractRegistry.class.getPackage().getName(), false); //NON-NLS if (rrRoot == null) { @@ -296,14 +294,15 @@ class ExtractRegistry extends Extract { /** * Identifies registry files in the database by mtimeItem, runs regripper on * them, and parses the output. + * @param ingestJobId The ingest job id. */ - private void analyzeRegistryFiles() { + private void analyzeRegistryFiles(long ingestJobId) { List allRegistryFiles = findRegistryFiles(); // open the log file FileWriter logFile = null; try { - logFile = new FileWriter(RAImageIngestModule.getRAOutputPath(currentCase, "reg") + File.separator + "regripper-info.txt"); //NON-NLS + logFile = new FileWriter(RAImageIngestModule.getRAOutputPath(currentCase, "reg", ingestJobId) + File.separator + "regripper-info.txt"); //NON-NLS } catch (IOException ex) { logger.log(Level.SEVERE, null, ex); } @@ -315,8 +314,8 @@ class ExtractRegistry extends Extract { String regFileName = regFile.getName(); long regFileId = regFile.getId(); - String regFileNameLocal = RAImageIngestModule.getRATempPath(currentCase, "reg") + File.separator + regFileName; - String outputPathBase = RAImageIngestModule.getRAOutputPath(currentCase, "reg") + File.separator + regFileName + "-regripper-" + Long.toString(regFileId); //NON-NLS + String regFileNameLocal = RAImageIngestModule.getRATempPath(currentCase, "reg", ingestJobId) + File.separator + regFileName; + String outputPathBase = RAImageIngestModule.getRAOutputPath(currentCase, "reg", ingestJobId) + File.separator + regFileName + "-regripper-" + Long.toString(regFileId); //NON-NLS File regFileNameLocalFile = new File(regFileNameLocal); try { 
ContentUtils.writeToFile(regFile, regFileNameLocalFile, context::dataSourceIngestIsCancelled); @@ -348,7 +347,7 @@ class ExtractRegistry extends Extract { logger.log(Level.SEVERE, null, ex); } - logger.log(Level.INFO, "{0}- Now getting registry information from {1}", new Object[]{moduleName, regFileNameLocal}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting registry information from {1}", new Object[]{getName(), regFileNameLocal}); //NON-NLS RegOutputFiles regOutputFiles = ripRegistryFile(regFileNameLocal, outputPathBase); if (context.dataSourceIngestIsCancelled()) { break; @@ -368,7 +367,7 @@ class ExtractRegistry extends Extract { // create a report for the full output if (!regOutputFiles.fullPlugins.isEmpty()) { //parse the full regripper output from SAM hive files - if (regFileNameLocal.toLowerCase().contains("sam") && parseSamPluginOutput(regOutputFiles.fullPlugins, regFile) == false) { + if (regFileNameLocal.toLowerCase().contains("sam") && parseSamPluginOutput(regOutputFiles.fullPlugins, regFile, ingestJobId) == false) { this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractRegistry.analyzeRegFiles.failedParsingResults", this.getName(), regFileName)); @@ -659,16 +658,13 @@ class ExtractRegistry extends Extract { // Check if there is already an OS_INFO artifact for this file, and add to that if possible. 
ArrayList results = tskCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_INFO, regFile.getId()); if (results.isEmpty()) { - BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_OS_INFO); - bbart.addAttributes(bbattributes); - - newArtifacts.add(bbart); + newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_OS_INFO, regFile, bbattributes)); } else { results.get(0).addAttributes(bbattributes); } } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); //NON-NLS + logger.log(Level.SEVERE, String.format("Error adding installed program artifact to blackboard for file %d.", regFile.getId()), ex); //NON-NLS } break; case "Profiler": // NON-NLS @@ -709,20 +705,15 @@ class ExtractRegistry extends Extract { // Check if there is already an OS_INFO artifact for this file and add to that if possible ArrayList results = tskCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_INFO, regFile.getId()); if (results.isEmpty()) { - BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_OS_INFO); - bbart.addAttributes(bbattributes); - - newArtifacts.add(bbart); + newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_OS_INFO, regFile, bbattributes)); } else { results.get(0).addAttributes(bbattributes); } } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding os info artifact to blackboard."); //NON-NLS + logger.log(Level.SEVERE, String.format("Error adding installed os_info to blackboard for file %d.", regFile.getId()), ex); //NON-NLS } break; case "CompName": // NON-NLS - String compName = ""; - String domain = ""; for (int j = 0; j < myartlist.getLength(); j++) { Node artchild = myartlist.item(j); // If it has attributes, then it is an Element (based off API) @@ -735,27 +726,34 @@ class ExtractRegistry extends Extract { if (name.equals("ComputerName")) { // NON-NLS compName = value; } else if (name.equals("Domain")) { // NON-NLS - domain = value; + domainName = value; } 
} } try { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, parentModuleName, compName)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, parentModuleName, domain)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, parentModuleName, domainName)); // Check if there is already an OS_INFO artifact for this file and add to that if possible ArrayList results = tskCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_INFO, regFile.getId()); if (results.isEmpty()) { - BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_OS_INFO); - bbart.addAttributes(bbattributes); - - newArtifacts.add(bbart); + newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_OS_INFO, regFile, bbattributes)); } else { results.get(0).addAttributes(bbattributes); } + for (Map.Entry userMap : getUserNameMap().entrySet()) { + String sid = ""; + try{ + sid = (String)userMap.getKey(); + String userName = (String)userMap.getValue(); + createOrUpdateOsAccount(regFile, sid, userName, null); + } catch(TskCoreException | TskDataException | NotUserSIDException ex) { + logger.log(Level.WARNING, String.format("Failed to update Domain for existing OsAccount: %s, sid: %s", regFile.getId(), sid), ex); + } + } } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding os info artifact to blackboard.", ex); //NON-NLS + logger.log(Level.SEVERE, String.format("Error adding os_info artifact to blackboard for file %d.", regFile.getId()), ex); //NON-NLS } break; default: @@ -780,9 +778,7 @@ class ExtractRegistry extends Extract { case "usb": //NON-NLS try { Long usbMtime = Long.parseLong(artnode.getAttribute("mtime")); //NON-NLS - usbMtime = Long.valueOf(usbMtime.toString()); - - BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED); + usbMtime = Long.valueOf(usbMtime.toString()); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, 
parentModuleName, usbMtime)); String dev = artnode.getAttribute("dev"); //NON-NLS String make = ""; @@ -799,11 +795,9 @@ class ExtractRegistry extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, parentModuleName, make)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, parentModuleName, model)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID, parentModuleName, value)); - bbart.addAttributes(bbattributes); - - newArtifacts.add(bbart); + newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED, regFile, bbattributes)); } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding device attached artifact to blackboard.", ex); //NON-NLS + logger.log(Level.SEVERE, String.format("Error adding device_attached artifact to blackboard for file %d.", regFile.getId()), ex); //NON-NLS } break; case "uninstall": //NON-NLS @@ -821,7 +815,7 @@ class ExtractRegistry extends Extract { try { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, parentModuleName, value)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, parentModuleName, itemMtime)); - BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_INSTALLED_PROG); + BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_DELETED_PROG); bbart.addAttributes(bbattributes); newArtifacts.add(bbart); @@ -865,65 +859,14 @@ class ExtractRegistry extends Extract { break; case "ProfileList": //NON-NLS - try { - String homeDir = value; - String sid = artnode.getAttribute("sid"); //NON-NLS - String username = artnode.getAttribute("username"); //NON-NLS - - // For now both an OsAccount and the - // TSK_OS_ACCOUNT artifact will be created. 
- try{ - createOrUpdateOsAccount(regFile, sid, username, homeDir); - - } catch(TskCoreException | TskDataException ex) { - logger.log(Level.SEVERE, String.format("Failed to create OsAccount for file: %s, sid: %s", regFile.getId(), sid)); - } - - BlackboardArtifact bbart = null; - try { - //check if any of the existing artifacts match this username - ArrayList existingArtifacts = currentCase.getSleuthkitCase().getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_ACCOUNT); - for (BlackboardArtifact artifact : existingArtifacts) { - if (artifact.getDataSource().getId() == regFile.getDataSourceObjectId()) { - BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_USER_ID)); - if (attribute != null && attribute.getValueString().equals(sid)) { - bbart = artifact; - break; - } - } - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error getting existing os account artifact", ex); - } - if (bbart == null) { - //create new artifact - bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_OS_ACCOUNT); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, - parentModuleName, username)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_ID, - parentModuleName, sid)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH, - parentModuleName, homeDir)); + String homeDir = value; + String sid = artnode.getAttribute("sid"); //NON-NLS + String username = artnode.getAttribute("username"); //NON-NLS - newArtifacts.add(bbart); - } else { - //add attributes to existing artifact - BlackboardAttribute bbattr = bbart.getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_USER_NAME)); - - if (bbattr == null) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, - parentModuleName, username)); - } - bbattr = bbart.getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PATH)); - if (bbattr == null) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH, 
- parentModuleName, homeDir)); - } - } - bbart.addAttributes(bbattributes); - - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding account artifact to blackboard.", ex); //NON-NLS + try{ + createOrUpdateOsAccount(regFile, sid, username, homeDir); + } catch(TskCoreException | TskDataException | NotUserSIDException ex) { + logger.log(Level.SEVERE, String.format("Failed to create OsAccount for file: %s, sid: %s", regFile.getId(), sid), ex); } break; @@ -1059,9 +1002,11 @@ class ExtractRegistry extends Extract { addBlueToothAttribute(line, attributes, TSK_DATETIME); line = reader.readLine(); addBlueToothAttribute(line, attributes, TSK_DATETIME_ACCESSED); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING, regFile, attributes); - if (bba != null) { - bbartifacts.add(bba); + + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING, regFile, attributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create bluetooth_pairing artifact for file %d", regFile.getId()), ex); } // Read blank line between records then next read line is start of next block reader.readLine(); @@ -1113,10 +1058,12 @@ class ExtractRegistry extends Extract { * * @param regFilePath the path to the registry file being parsed * @param regAbstractFile the file to associate newly created artifacts with + * @param ingestJobId The ingest job id. 
* * @return true if successful, false if parsing failed at some point */ - private boolean parseSamPluginOutput(String regFilePath, AbstractFile regAbstractFile) { + private boolean parseSamPluginOutput(String regFilePath, AbstractFile regAbstractFile, long ingestJobId) { + File regfile = new File(regFilePath); List newArtifacts = new ArrayList<>(); try (BufferedReader bufferedReader = new BufferedReader(new FileReader(regfile))) { @@ -1147,11 +1094,11 @@ class ExtractRegistry extends Extract { // New OsAccount Code OsAccountManager accountMgr = tskCase.getOsAccountManager(); HostManager hostMrg = tskCase.getHostManager(); - Host host = hostMrg.getHost((DataSource)dataSource); + Host host = hostMrg.getHostByDataSource((DataSource)dataSource); - List existingAccounts = accountMgr.getAccounts(host); + List existingAccounts = accountMgr.getOsAccounts(host); for(OsAccount osAccount: existingAccounts) { - Optional optional = osAccount.getUniqueIdWithinRealm(); + Optional optional = osAccount.getAddr(); if(!optional.isPresent()) { continue; } @@ -1165,219 +1112,27 @@ class ExtractRegistry extends Extract { //add remaining userinfos as accounts; for (Map userInfo : userInfoMap.values()) { - OsAccount osAccount = accountMgr.createWindowsAccount(userInfo.get(SID_KEY), null, null, host, OsAccountRealm.RealmScope.UNKNOWN); + OsAccount osAccount = accountMgr.newWindowsOsAccount(userInfo.get(SID_KEY), null, domainName, host, domainName != null && !domainName.isEmpty() ? OsAccountRealm.RealmScope.DOMAIN : OsAccountRealm.RealmScope.UNKNOWN); + accountMgr.newOsAccountInstance(osAccount, (DataSource)dataSource, OsAccountInstance.OsAccountInstanceType.LAUNCHED); updateOsAccount(osAccount, userInfo, groupMap.get(userInfo.get(SID_KEY)), regAbstractFile); } - - // Existing TSK_OS_ACCOUNT code. 
- - //get all existing OS account artifacts - List existingOsAccounts = tskCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_ACCOUNT); - for (BlackboardArtifact osAccount : existingOsAccounts) { - //if the OS Account artifact was from the same data source check the user id - if (osAccount.getDataSource().getId() == regAbstractFile.getDataSourceObjectId()) { - BlackboardAttribute existingUserId = osAccount.getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_USER_ID)); - if (existingUserId != null) { - String userID = existingUserId.getValueString().trim(); - Map userInfo = userInfoMap.remove(userID); - //if the existing user id matches a user id which we parsed information for check if that information exists and if it doesn't add it - if (userInfo != null) { - osAccount.addAttributes(getAttributesForAccount(userInfo, groupMap.get(userID), true, regAbstractFile)); - } - } - } - } - - //add remaining userinfos as accounts; - for (Map userInfo : userInfoMap.values()) { - BlackboardArtifact bbart = regAbstractFile.newArtifact(ARTIFACT_TYPE.TSK_OS_ACCOUNT); - bbart.addAttributes(getAttributesForAccount(userInfo, groupMap.get(userInfo.get(SID_KEY)), false, regAbstractFile)); - // index the artifact for keyword search - newArtifacts.add(bbart); - } - // Get a mapping of user sids to user names and save globally so it can be used for other areas - // of the registry, ie: BAM key - try { - userNameMap = makeUserNameMap(dataSource); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Unable to create OS Account user name map", ex); - // This is not the end of the world we will just continue without - // user names - userNameMap = new HashMap<>(); - } return true; } catch (FileNotFoundException ex) { logger.log(Level.WARNING, "Error finding the registry file.", ex); //NON-NLS } catch (IOException ex) { logger.log(Level.WARNING, "Error building the document parser: {0}", ex); //NON-NLS - } catch (ParseException ex) { - logger.log(Level.WARNING, "Error 
parsing the the date from the registry file", ex); //NON-NLS } catch (TskDataException | TskCoreException ex) { logger.log(Level.WARNING, "Error updating TSK_OS_ACCOUNT artifacts to include newly parsed data.", ex); //NON-NLS - } finally { + } catch (OsAccountManager.NotUserSIDException ex) { + logger.log(Level.WARNING, "Error creating OS Account, input SID is not a user SID.", ex); //NON-NLS + } + finally { if (!context.dataSourceIngestIsCancelled()) { postArtifacts(newArtifacts); } } return false; } - - /** - * Creates the attribute list for the given user information and group list. - * - * @param userInfo Map of key\value pairs of user information - * @param groupList List of the groups that user belongs - * @param existingUser - * - * @return List - * - * @throws ParseException - */ - Collection getAttributesForAccount(Map userInfo, List groupList, boolean existingUser, AbstractFile regAbstractFile) throws ParseException { - Collection bbattributes = new ArrayList<>(); - - SimpleDateFormat regRipperTimeFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy 'Z'", US); - regRipperTimeFormat.setTimeZone(getTimeZone("GMT")); - - if (!existingUser) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_ID, - getRAModuleName(), userInfo.get(SID_KEY))); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, - this.moduleName, userInfo.get(USERNAME_KEY))); - } - - String value = userInfo.get(ACCOUNT_CREATED_KEY); - if (value != null && !value.isEmpty() && !value.equals(NEVER_DATE)) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - getRAModuleName(), regRipperTimeFormat.parse(value).getTime() / MS_IN_SEC)); - } - - value = userInfo.get(LAST_LOGIN_KEY); - if (value != null && !value.isEmpty() && !value.equals(NEVER_DATE)) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - getRAModuleName(), regRipperTimeFormat.parse(value).getTime() / MS_IN_SEC)); - } - - value = 
userInfo.get(LOGIN_COUNT_KEY); - if (value != null && !value.isEmpty()) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNT, - getRAModuleName(), Integer.parseInt(value))); - } - - value = userInfo.get(ACCOUNT_TYPE_KEY); - if (value != null && !value.isEmpty()) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE, - getRAModuleName(), value)); - } - - value = userInfo.get(USER_COMMENT_KEY); - if (value != null && !value.isEmpty()) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DESCRIPTION, - getRAModuleName(), value)); - } - - value = userInfo.get(NAME_KEY); - if (value != null && !value.isEmpty()) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, - getRAModuleName(), value)); - } - - value = userInfo.get(INTERNET_NAME_KEY); - if (value != null && !value.isEmpty()) { - try { - // Create an account for this email, if it doesn't already exist. - Case.getCurrentCaseThrows() - .getSleuthkitCase() - .getCommunicationsManager() - .createAccountFileInstance(Account.Type.EMAIL, - value, getRAModuleName(), regAbstractFile); - } catch (NoCurrentCaseException | TskCoreException ex) { - logger.log(Level.SEVERE, - String.format("Error adding email account with value " - + "%s, to the case database for file %s [objId=%d]", - value, regAbstractFile.getName(), regAbstractFile.getId()), ex); - } - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL, - getRAModuleName(), value)); - } - - value = userInfo.get(FULL_NAME_KEY); - if (value != null && !value.isEmpty()) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DISPLAY_NAME, - getRAModuleName(), value)); - } - - value = userInfo.get(PWD_RESET_KEY); - if (value != null && !value.isEmpty() && !value.equals(NEVER_DATE)) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_PASSWORD_RESET, - getRAModuleName(), regRipperTimeFormat.parse(value).getTime() / MS_IN_SEC)); - } - - value = 
userInfo.get(PASSWORD_HINT); - if (value != null && !value.isEmpty()) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PASSWORD_HINT, - getRAModuleName(), value)); - } - - value = userInfo.get(PWD_FAILE_KEY); - if (value != null && !value.isEmpty() && !value.equals(NEVER_DATE)) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_PASSWORD_FAIL, - getRAModuleName(), regRipperTimeFormat.parse(value).getTime() / MS_IN_SEC)); - } - - String settingString = ""; - for (String setting : PASSWORD_SETTINGS_FLAGS) { - if (userInfo.containsKey(setting)) { - settingString += setting + ", "; - } - } - - if (!settingString.isEmpty()) { - settingString = settingString.substring(0, settingString.length() - 2); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PASSWORD_SETTINGS, - getRAModuleName(), settingString)); - } - - settingString = ""; - for (String setting : ACCOUNT_SETTINGS_FLAGS) { - if (userInfo.containsKey(setting)) { - settingString += setting + ", "; - } - } - - if (!settingString.isEmpty()) { - settingString = settingString.substring(0, settingString.length() - 2); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ACCOUNT_SETTINGS, - getRAModuleName(), settingString)); - } - - settingString = ""; - for (String setting : ACCOUNT_TYPE_FLAGS) { - if (userInfo.containsKey(setting)) { - settingString += setting + ", "; - } - } - - if (!settingString.isEmpty()) { - settingString = settingString.substring(0, settingString.length() - 2); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_FLAG, - getRAModuleName(), settingString)); - } - - if (groupList != null && groupList.isEmpty()) { - String groups = ""; - for (String group : groupList) { - groups += group + ", "; - } - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GROUPS, - getRAModuleName(), groups.substring(0, groups.length() - 2))); - } - - return bbattributes; - } /** * Read the User Information section of the SAM regripper 
plugin's output @@ -1495,7 +1250,7 @@ class ExtractRegistry extends Extract { // We can add the S- back to the string that we split on since S- is a valid beginning of a User SID String fileNameSid[] = tokens[4].split("\\s+\\(S-"); String userSid = "S-" + fileNameSid[1].substring(0, fileNameSid[1].length() - 1); - String userName = userNameMap.get(userSid); + String userName = getUserNameMap().get(userSid); if (userName == null) { userName = userSid; } @@ -1511,13 +1266,16 @@ class ExtractRegistry extends Extract { attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, getName(), userName)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, getName(), progRunDateTime)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_PROG_RUN, regFile, attributes); - if (bba != null) { + + try { + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_PROG_RUN, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_PROG_RUN artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); } @@ -1576,14 +1334,18 @@ class ExtractRegistry extends Extract { attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getName(), adobeUsedTime)); attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); - if (bba != null) { - bbartifacts.add(bba); - fileName = fileName.replace("\0", ""); - bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); + try{ + BlackboardArtifact bba = 
createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); if (bba != null) { bbartifacts.add(bba); + fileName = fileName.replace("\0", ""); + bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); + if (bba != null) { + bbartifacts.add(bba); + } } + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); } @@ -1624,17 +1386,21 @@ class ExtractRegistry extends Extract { Collection attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); - if (bba != null) { - bbartifacts.add(bba); - bba = createAssociatedArtifact(fileName, bba); + try{ + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); if (bba != null) { bbartifacts.add(bba); - bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); + bba = createAssociatedArtifact(fileName, bba); if (bba != null) { bbartifacts.add(bba); + bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); + if (bba != null) { + bbartifacts.add(bba); + } } } + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); } @@ -1676,13 +1442,17 @@ class ExtractRegistry extends Extract { Collection attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); - if (bba != null) { - bbartifacts.add(bba); - 
bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); + try{ + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); if (bba != null) { bbartifacts.add(bba); + bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); + if (bba != null) { + bbartifacts.add(bba); + } } + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); } @@ -1724,13 +1494,15 @@ class ExtractRegistry extends Extract { Collection attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); - if (bba != null) { + try{ + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); } @@ -1767,13 +1539,16 @@ class ExtractRegistry extends Extract { Collection attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); - if (bba != null) { + try{ + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != 
null) { bbartifacts.add(bba); } + + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); line = line.trim(); @@ -1817,13 +1592,15 @@ class ExtractRegistry extends Extract { attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getName(), docDate)); attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); - if (bba != null) { + try{ + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); line = line.trim(); @@ -1880,13 +1657,15 @@ class ExtractRegistry extends Extract { attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getName(), usedTime)); attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); - if (bba != null) { + try{ + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), 
ex); } line = line.trim(); } @@ -1916,15 +1695,7 @@ class ExtractRegistry extends Extract { if (!sourceFiles.isEmpty()) { for (AbstractFile sourceFile : sourceFiles) { if (sourceFile.getParentPath().endsWith(filePath)) { - Collection bbattributes2 = new ArrayList<>(); - bbattributes2.addAll(Arrays.asList( - new BlackboardAttribute(TSK_ASSOCIATED_ARTIFACT, this.getName(), - bba.getArtifactID()))); - - BlackboardArtifact associatedObjectBba = createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, sourceFile, bbattributes2); - if (associatedObjectBba != null) { - return associatedObjectBba; - } + return createAssociatedArtifact(sourceFile, bba); } } } @@ -1947,22 +1718,35 @@ class ExtractRegistry extends Extract { * @throws TskCoreException */ private Map makeUserNameMap(Content dataSource) throws TskCoreException { - Map userNameMap = new HashMap<>(); + Map map = new HashMap<>(); - List accounts = blackboard.getArtifacts(TSK_OS_ACCOUNT.getTypeID(), dataSource.getId()); - - for (BlackboardArtifact account : accounts) { - BlackboardAttribute nameAttribute = getAttributeForArtifact(account, TSK_USER_NAME); - BlackboardAttribute idAttribute = getAttributeForArtifact(account, TSK_USER_ID); - - String userName = nameAttribute != null ? nameAttribute.getDisplayString() : ""; - String userID = idAttribute != null ? idAttribute.getDisplayString() : ""; - - if (!userID.isEmpty()) { - userNameMap.put(userID, userName); - } + for(OsAccount account: tskCase.getOsAccountManager().getOsAccounts(((DataSource)dataSource).getHost())) { + Optional userName = account.getLoginName(); + map.put(account.getName(), userName.isPresent() ? userName.get() : ""); } + return map; + } + + /** + * Returns a mapping of user sids to user names. + * + * @return username man or empty list if none where found. 
+ */ + private Map getUserNameMap() { + if(userNameMap == null) { + // Get a mapping of user sids to user names and save globally so it can be used for other areas + // of the registry, ie: BAM key + try { + userNameMap = makeUserNameMap(dataSource); + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Unable to create OS Account user name map", ex); + // This is not the end of the world we will just continue without + // user names + userNameMap = new HashMap<>(); + } + } + return userNameMap; } @@ -1993,7 +1777,6 @@ class ExtractRegistry extends Extract { try { for (ShellBag bag : shellbags) { Collection attributes = new ArrayList<>(); - BlackboardArtifact artifact = regFile.newArtifact(getShellBagArtifact().getTypeID()); attributes.add(new BlackboardAttribute(TSK_PATH, getName(), bag.getResource())); attributes.add(new BlackboardAttribute(getKeyAttribute(), getName(), bag.getKey())); @@ -2018,9 +1801,7 @@ class ExtractRegistry extends Extract { attributes.add(new BlackboardAttribute(TSK_DATETIME_ACCESSED, getName(), time)); } - artifact.addAttributes(attributes); - - artifacts.add(artifact); + artifacts.add(createArtifactWithAttributes(getShellBagArtifact(), regFile, attributes)); } } finally { if(!context.dataSourceIngestIsCancelled()) { @@ -2181,7 +1962,7 @@ class ExtractRegistry extends Extract { this.context = context; progressBar.progress(Bundle.Progress_Message_Analyze_Registry()); - analyzeRegistryFiles(); + analyzeRegistryFiles(context.getJobId()); } @@ -2204,30 +1985,34 @@ class ExtractRegistry extends Extract { * * @throws TskCoreException * @throws TskDataException + * @throws OsAccountManager.NotUserSIDException */ - private void createOrUpdateOsAccount(AbstractFile file, String sid, String userName, String homeDir) throws TskCoreException, TskDataException { + private void createOrUpdateOsAccount(AbstractFile file, String sid, String userName, String homeDir) throws TskCoreException, TskDataException, NotUserSIDException { OsAccountManager 
accountMgr = tskCase.getOsAccountManager(); HostManager hostMrg = tskCase.getHostManager(); - Host host = hostMrg.getHost((DataSource)dataSource); + Host host = hostMrg.getHostByDataSource((DataSource)dataSource); - Optional optional = accountMgr.getWindowsAccount(sid, null, null, host); + Optional optional = accountMgr.getWindowsOsAccount(sid, null, null, host); OsAccount osAccount; if (!optional.isPresent()) { - osAccount = accountMgr.createWindowsAccount(sid, userName != null && userName.isEmpty() ? null : userName, null, host, OsAccountRealm.RealmScope.UNKNOWN); + osAccount = accountMgr.newWindowsOsAccount(sid, userName != null && userName.isEmpty() ? null : userName, domainName, host, domainName != null && !domainName.isEmpty()? OsAccountRealm.RealmScope.DOMAIN : OsAccountRealm.RealmScope.UNKNOWN); + accountMgr.newOsAccountInstance(osAccount, (DataSource)dataSource, OsAccountInstance.OsAccountInstanceType.LAUNCHED); } else { osAccount = optional.get(); if (userName != null && !userName.isEmpty()) { - osAccount.setLoginName(userName); + OsAccountUpdateResult updateResult= accountMgr.updateCoreWindowsOsAccountAttributes(osAccount, null, userName, null, host); + osAccount = updateResult.getUpdatedAccount().orElse(osAccount); } } if (homeDir != null && !homeDir.isEmpty()) { List attributes = new ArrayList<>(); - attributes.add(createOsAccountAttribute(TSK_HOME_DIR, homeDir, osAccount, host, file)); - osAccount.addAttributes(attributes); + String dir = homeDir.replaceFirst("^(%\\w*%)", ""); + dir = dir.replace("\\", "/"); + attributes.add(createOsAccountAttribute(TSK_HOME_DIR, dir, osAccount, host, file)); + accountMgr.addExtendedOsAccountAttributes(osAccount, attributes); } - accountMgr.updateAccount(osAccount); } /** @@ -2278,7 +2063,7 @@ class ExtractRegistry extends Extract { * @throws TskDataException * @throws TskCoreException */ - private void updateOsAccount(OsAccount osAccount, Map userInfo, List groupList, AbstractFile regFile) throws TskDataException, 
TskCoreException { + private void updateOsAccount(OsAccount osAccount, Map userInfo, List groupList, AbstractFile regFile) throws TskDataException, TskCoreException, NotUserSIDException { Host host = ((DataSource)dataSource).getHost(); SimpleDateFormat regRipperTimeFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy 'Z'", US); @@ -2286,12 +2071,11 @@ class ExtractRegistry extends Extract { List attributes = new ArrayList<>(); + Long creationTime = null; + String value = userInfo.get(ACCOUNT_CREATED_KEY); if (value != null && !value.isEmpty() && !value.equals(NEVER_DATE)) { - Long time = parseRegRipTime(value); - if (time != null) { - osAccount.setCreationTime(time); - } + creationTime = parseRegRipTime(value); } value = userInfo.get(LAST_LOGIN_KEY); @@ -2303,6 +2087,12 @@ class ExtractRegistry extends Extract { osAccount, host, regFile)); } } + + String loginName = null; + value = userInfo.get(USERNAME_KEY); + if (value != null && !value.isEmpty()) { + loginName = value; + } value = userInfo.get(LOGIN_COUNT_KEY); if (value != null && !value.isEmpty()) { @@ -2335,13 +2125,14 @@ class ExtractRegistry extends Extract { } // FULL_NAME_KEY and NAME_KEY appear to be the same value. 
+ String fullName = null; value = userInfo.get(FULL_NAME_KEY); if (value != null && !value.isEmpty()) { - osAccount.setFullName(value); + fullName = value; } else { value = userInfo.get(NAME_KEY); if (value != null && !value.isEmpty()) { - osAccount.setFullName(value); + fullName = value; } } @@ -2369,16 +2160,14 @@ class ExtractRegistry extends Extract { } } - String settingString = getSettingsFromMap(ACCOUNT_SETTINGS_FLAGS, userInfo); + String settingString = getSettingsFromMap(PASSWORD_SETTINGS_FLAGS, userInfo); if (!settingString.isEmpty()) { - settingString = settingString.substring(0, settingString.length() - 2); attributes.add(createOsAccountAttribute(ATTRIBUTE_TYPE.TSK_PASSWORD_SETTINGS, settingString, osAccount, host, regFile)); } settingString = getSettingsFromMap(ACCOUNT_SETTINGS_FLAGS, userInfo); if (!settingString.isEmpty()) { - settingString = settingString.substring(0, settingString.length() - 2); attributes.add(createOsAccountAttribute(ATTRIBUTE_TYPE.TSK_ACCOUNT_SETTINGS, settingString, osAccount, host, regFile)); } @@ -2398,8 +2187,17 @@ class ExtractRegistry extends Extract { groups, osAccount, host, regFile)); } - osAccount.addAttributes(attributes); - tskCase.getOsAccountManager().updateAccount(osAccount); + // add the attributes to account. 
+ OsAccountManager accountMgr = tskCase.getOsAccountManager(); + accountMgr.addExtendedOsAccountAttributes(osAccount, attributes); + + // update the loginname + accountMgr.updateCoreWindowsOsAccountAttributes(osAccount, null, loginName, null, host); + + // update other standard attributes - fullname, creationdate + accountMgr.updateStandardOsAccountAttributes(osAccount, fullName, null, null, creationTime); + + } /** @@ -2439,7 +2237,7 @@ class ExtractRegistry extends Extract { * @return Newly created OsACcountAttribute */ private OsAccountAttribute createOsAccountAttribute(BlackboardAttribute.ATTRIBUTE_TYPE type, String value, OsAccount osAccount, Host host, AbstractFile file) { - return new OsAccountAttribute(new BlackboardAttribute.Type(type), value, osAccount, host, file); + return osAccount.new OsAccountAttribute(new BlackboardAttribute.Type(type), value, osAccount, host, file); } /** @@ -2454,7 +2252,7 @@ class ExtractRegistry extends Extract { * @return Newly created OsACcountAttribute */ private OsAccountAttribute createOsAccountAttribute(BlackboardAttribute.ATTRIBUTE_TYPE type, Long value, OsAccount osAccount, Host host, AbstractFile file) { - return new OsAccountAttribute(new BlackboardAttribute.Type(type), value, osAccount, host, file); + return osAccount.new OsAccountAttribute(new BlackboardAttribute.Type(type), value, osAccount, host, file); } /** @@ -2469,6 +2267,6 @@ class ExtractRegistry extends Extract { * @return Newly created OsACcountAttribute */ private OsAccountAttribute createOsAccountAttribute(BlackboardAttribute.ATTRIBUTE_TYPE type, Integer value, OsAccount osAccount, Host host, AbstractFile file) { - return new OsAccountAttribute(new BlackboardAttribute.Type(type), value, osAccount, host, file); + return osAccount.new OsAccountAttribute(new BlackboardAttribute.Type(type), value, osAccount, host, file); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java 
b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 676ff7923d..2bf0351542 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -49,7 +49,10 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.recentactivity.BinaryCookieReader.Cookie; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; import org.xml.sax.SAXException; @@ -98,14 +101,6 @@ final class ExtractSafari extends Extract { "Progress_Message_Safari_Downloads=Safari Downloads", }) - /** - * Extract the bookmarks, cookies, downloads and history from Safari. 
- * - */ - ExtractSafari() { - - } - @Override protected String getName() { return Bundle.ExtractSafari_Module_Name(); @@ -294,7 +289,7 @@ final class ExtractSafari extends Extract { return; } - File tempHistoryFile = createTemporaryFile(context, historyFile); + File tempHistoryFile = createTemporaryFile(context, historyFile, context.getJobId()); try { ContentUtils.writeToFile(historyFile, tempHistoryFile, context::dataSourceIngestIsCancelled); @@ -329,7 +324,7 @@ final class ExtractSafari extends Extract { return; } - File tempFile = createTemporaryFile(context, file); + File tempFile = createTemporaryFile(context, file, context.getJobId()); try { if(!context.dataSourceIngestIsCancelled()) { @@ -359,7 +354,7 @@ final class ExtractSafari extends Extract { return; } - File tempFile = createTemporaryFile(context, file); + File tempFile = createTemporaryFile(context, file, context.getJobId()); try { if(!context.dataSourceIngestIsCancelled()) { @@ -390,7 +385,7 @@ final class ExtractSafari extends Extract { File tempFile = null; try { - tempFile = createTemporaryFile(context, file); + tempFile = createTemporaryFile(context, file, context.getJobId()); if(!context.dataSourceIngestIsCancelled()) { postArtifacts(getCookieArtifacts(file, tempFile, context)); @@ -430,10 +425,12 @@ final class ExtractSafari extends Extract { String title = row.get(HEAD_TITLE).toString(); Long time = (Double.valueOf(row.get(HEAD_TIME).toString())).longValue(); - BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); - bbart.addAttributes(createHistoryAttribute(url, time, null, title, - this.getName(), NetworkUtils.extractDomain(url), null)); - bbartifacts.add(bbart); + bbartifacts.add( + createArtifactWithAttributes( + TSK_WEB_HISTORY, + origFile, + createHistoryAttribute(url, time, null, title, + this.getName(), NetworkUtils.extractDomain(url), null))); } return bbartifacts; @@ -564,10 +561,19 @@ final class ExtractSafari extends Extract { } Cookie 
cookie = iter.next(); - - BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE); - bbart.addAttributes(createCookieAttributes(cookie.getURL(), cookie.getCreationDate(), null, cookie.getExpirationDate(), cookie.getName(), cookie.getValue(), this.getName(), NetworkUtils.extractDomain(cookie.getURL()))); - bbartifacts.add(bbart); + + bbartifacts.add( + createArtifactWithAttributes( + TSK_WEB_COOKIE, + origFile, + createCookieAttributes( + cookie.getURL(), + cookie.getCreationDate(), + null, + cookie.getExpirationDate(), + cookie.getName(), cookie.getValue(), + this.getName(), + NetworkUtils.extractDomain(cookie.getURL())))); } } @@ -615,9 +621,12 @@ final class ExtractSafari extends Extract { } if (url != null || title != null) { - BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK); - bbart.addAttributes(createBookmarkAttributes(url, title, null, getName(), NetworkUtils.extractDomain(url))); - bbartifacts.add(bbart); + bbartifacts.add(createArtifactWithAttributes(TSK_WEB_BOOKMARK, origFile, + createBookmarkAttributes(url, + title, + null, + getName(), + NetworkUtils.extractDomain(url)))); } } } @@ -656,17 +665,12 @@ final class ExtractSafari extends Extract { time = date.getDate().getTime(); } - BlackboardArtifact webDownloadArtifact = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); - webDownloadArtifact.addAttributes(this.createDownloadAttributes(path, pathID, url, time, NetworkUtils.extractDomain(url), getName())); + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(TSK_WEB_DOWNLOAD, origFile, createDownloadAttributes(path, pathID, url, time, NetworkUtils.extractDomain(url), getName())); bbartifacts.add(webDownloadArtifact); // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. 
for (AbstractFile downloadedFile : fileManager.findFiles(dataSource, FilenameUtils.getName(path), FilenameUtils.getPath(path))) { - BlackboardArtifact associatedObjectArtifact = downloadedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); - associatedObjectArtifact.addAttribute( - new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, - RecentActivityExtracterModuleFactory.getModuleName(), webDownloadArtifact.getArtifactID())); - bbartifacts.add(associatedObjectArtifact); + bbartifacts.add(createAssociatedArtifact(downloadedFile, webDownloadArtifact)); break; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java index 310d5f8c62..50c2c6afd4 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -46,11 +46,8 @@ import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; @@ -84,7 +81,7 @@ final class ExtractSru extends Extract { "ExtractSru_module_name=System Resource Usage Extractor" }) ExtractSru() { - this.moduleName = Bundle.ExtractSru_module_name(); + super(Bundle.ExtractSru_module_name()); } @Messages({ @@ -103,7 +100,7 @@ final class ExtractSru extends Extract { dir.mkdirs(); } - String tempDirPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "sru"); //NON-NLS + String tempDirPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "sru", context.getJobId()); //NON-NLS String softwareHiveFileName = getSoftwareHiveFile(dataSource, tempDirPath); if (softwareHiveFileName == null) { @@ -370,8 +367,7 @@ final class ExtractSru extends Extract { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), NETWORK_USAGE_SOURCE_NAME)); try { - BlackboardArtifact bbart = sruAbstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN); - bbart.addAttributes(bbattributes); + BlackboardArtifact bbart = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN, sruAbstractFile, bbattributes); bba.add(bbart); BlackboardArtifact associateBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), bbart); if (associateBbArtifact != null) { @@ -427,8 +423,7 @@ final class ExtractSru extends Extract { 
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), APPLICATION_USAGE_SOURCE_NAME)); try { - BlackboardArtifact bbart = sruAbstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN); - bbart.addAttributes(bbattributes); + BlackboardArtifact bbart = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN, sruAbstractFile, bbattributes); bba.add(bbart); BlackboardArtifact associateBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), bbart); if (associateBbArtifact != null) { @@ -459,18 +454,10 @@ final class ExtractSru extends Extract { * * @returnv BlackboardArtifact or a null value */ - private BlackboardArtifact createAssociatedArtifact(String filePathName, BlackboardArtifact bba) { + private BlackboardArtifact createAssociatedArtifact(String filePathName, BlackboardArtifact bba) throws TskCoreException { if (applicationFilesFound.containsKey(filePathName)) { AbstractFile sourceFile = applicationFilesFound.get(filePathName); - Collection bbattributes2 = new ArrayList<>(); - bbattributes2.addAll(Arrays.asList( - new BlackboardAttribute(TSK_ASSOCIATED_ARTIFACT, this.getName(), - bba.getArtifactID()))); - - BlackboardArtifact associatedObjectBba = createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, sourceFile, bbattributes2); - if (associatedObjectBba != null) { - return associatedObjectBba; - } + return createAssociatedArtifact(sourceFile, bba); } return null; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java index 54e50e9f01..50cf7ce90b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -45,7 +45,7 @@ class ExtractWebAccountType extends Extract { private static final Logger logger = Logger.getLogger(ExtractWebAccountType.class.getName()); ExtractWebAccountType() { - moduleName = NbBundle.getMessage(ExtractWebAccountType.class, "ExtractWebAccountType.moduleName.text"); + super(NbBundle.getMessage(ExtractWebAccountType.class, "ExtractWebAccountType.moduleName.text")); } private static final List QUERY_ARTIFACTS = Arrays.asList( diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java index aaedc654b1..b8c8d18dba 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.recentactivity; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; @@ -38,16 +37,15 @@ import org.sleuthkit.datamodel.BlackboardArtifact; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD; import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; /** - * Extract the :Zone.Indentifier alternate data stream files. A file with - * a :Zone.Indentifier extention contains information about the similarly - * named (with out zone identifer extension) downloaded file. + * Extract the :Zone.Identifier alternate data stream files. A file with + * a :Zone.Identifier extension contains information about the similarly + * named (with out zone identifier extension) downloaded file. */ final class ExtractZoneIdentifier extends Extract { @@ -152,15 +150,10 @@ final class ExtractZoneIdentifier extends Extract { // The zone identifier file is the parent of this artifact // because it is the file we parsed to get the data BlackboardArtifact downloadBba = createDownloadArtifact(zoneFile, zoneInfo, downloadFile); - if (downloadBba != null) { - downloadArtifacts.add(downloadBba); - // create a TSK_ASSOCIATED_OBJECT for the downloaded file, associating it with the TSK_WEB_DOWNLOAD artifact. 
- if (downloadFile.getArtifactsCount(TSK_ASSOCIATED_OBJECT) == 0) { - BlackboardArtifact associatedObjectBba = createAssociatedObjectArtifact(downloadFile, downloadBba); - if (associatedObjectBba != null) { - associatedObjectArtifacts.add(associatedObjectBba); - } - } + downloadArtifacts.add(downloadBba); + // create a TSK_ASSOCIATED_OBJECT for the downloaded file, associating it with the TSK_WEB_DOWNLOAD artifact. + if (downloadFile.getArtifactsCount(TSK_ASSOCIATED_OBJECT) == 0) { + associatedObjectArtifacts.add(createAssociatedArtifact(downloadFile, downloadBba)); } } @@ -201,29 +194,6 @@ final class ExtractZoneIdentifier extends Extract { return downloadFile; } - /** - * Create a Associated Object Artifact for the given ZoneIdentifierInfo - * object. - * - * @param downloadFile AbstractFile representing the file downloaded, not - * the zone identifier file. - * @param downloadBba TSK_WEB_DOWNLOAD artifact to associate with. - * - * @return TSK_ASSOCIATED_OBJECT artifact. - */ - private BlackboardArtifact createAssociatedObjectArtifact(AbstractFile downloadFile, BlackboardArtifact downloadBba) { - - Collection bbattributes = new ArrayList<>(); - - bbattributes.addAll(Arrays.asList( - new BlackboardAttribute(TSK_ASSOCIATED_ARTIFACT, - RecentActivityExtracterModuleFactory.getModuleName(), - downloadBba.getArtifactID()) - )); - - return createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, downloadFile, bbattributes); - } - /** * Create a TSK_WEB_DOWNLOAD Artifact for the given zone identifier file. 
* @@ -233,7 +203,7 @@ final class ExtractZoneIdentifier extends Extract { * * @return BlackboardArifact for the given parameters */ - private BlackboardArtifact createDownloadArtifact(AbstractFile zoneFile, ZoneIdentifierInfo zoneInfo, AbstractFile downloadFile) { + private BlackboardArtifact createDownloadArtifact(AbstractFile zoneFile, ZoneIdentifierInfo zoneInfo, AbstractFile downloadFile) throws TskCoreException { String downloadFilePath = downloadFile.getParentPath() + downloadFile.getName(); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 684d373519..6dd30ed4cc 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -100,7 +100,7 @@ class Firefox extends Extract { private IngestJobContext context; Firefox() { - moduleName = NbBundle.getMessage(Firefox.class, "Firefox.moduleName"); + super(NbBundle.getMessage(Firefox.class, "Firefox.moduleName")); } @Override @@ -108,47 +108,52 @@ class Firefox extends Extract { this.dataSource = dataSource; this.context = context; dataFound = false; + long ingestJobId = context.getJobId(); progressBar.progress(Bundle.Progress_Message_Firefox_History()); - this.getHistory(); + this.getHistory(context.getJobId()); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(Bundle.Progress_Message_Firefox_Bookmarks()); - this.getBookmark(); + this.getBookmark(ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(Bundle.Progress_Message_Firefox_Downloads()); - this.getDownload(); + this.getDownload(ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(Bundle.Progress_Message_Firefox_Cookies()); - this.getCookie(); + this.getCookie(ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } 
progressBar.progress(Bundle.Progress_Message_Firefox_FormHistory()); - this.getFormsHistory(); + this.getFormsHistory(ingestJobId); if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(Bundle.Progress_Message_Firefox_AutoFill()); - this.getAutofillProfiles(); + this.getAutofillProfiles(ingestJobId); } - private void getHistory() { + /** + * Get Firefox history. + * @param ingestJobId The ingest job id. + */ + private void getHistory(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List historyFiles; try { @@ -180,7 +185,7 @@ class Firefox extends Extract { } String fileName = historyFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox", ingestJobId) + File.separator + fileName + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(historyFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -204,7 +209,7 @@ class Firefox extends Extract { break; } List> tempList = this.dbConnect(temps, HISTORY_QUERY); - logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { if (context.dataSourceIngestIsCancelled()) { @@ -236,9 +241,11 @@ class Firefox extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes)); + 
} catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_WEB_HISTORY artifact for file %d", historyFile.getId()), ex); } } ++j; @@ -252,8 +259,9 @@ class Firefox extends Extract { /** * Queries for bookmark files and adds artifacts + * @param ingestJobId The ingest job id. */ - private void getBookmark() { + private void getBookmark(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List bookmarkFiles; @@ -279,7 +287,7 @@ class Firefox extends Extract { continue; } String fileName = bookmarkFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox", ingestJobId) + File.separator + fileName + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(bookmarkFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -302,7 +310,7 @@ class Firefox extends Extract { break; } List> tempList = this.dbConnect(temps, BOOKMARK_QUERY); - logger.log(Level.INFO, "{0} - Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0} - Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { if (context.dataSourceIngestIsCancelled()) { @@ -332,9 +340,10 @@ class Firefox extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, 
String.format("Failed to create TSK_WEB_BOOKMARK artifact for file %d", bookmarkFile.getId()), ex); } } ++j; @@ -348,8 +357,9 @@ class Firefox extends Extract { /** * Queries for cookies file and adds artifacts + * @param ingestJobId The ingest job id. */ - private void getCookie() { + private void getCookie(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List cookiesFiles; try { @@ -378,7 +388,7 @@ class Firefox extends Extract { continue; } String fileName = cookiesFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox", ingestJobId) + File.separator + fileName + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(cookiesFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -410,7 +420,7 @@ class Firefox extends Extract { } List> tempList = this.dbConnect(temps, query); - logger.log(Level.INFO, "{0} - Now getting cookies from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0} - Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { if (context.dataSourceIngestIsCancelled()) { @@ -448,9 +458,10 @@ class Firefox extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), domain)); } - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_WEB_COOKIE artifact for file %d", cookiesFile.getId()), ex); } } ++j; @@ 
-464,18 +475,20 @@ class Firefox extends Extract { /** * Queries for downloads files and adds artifacts + * @param ingestJobId The ingest job id. */ - private void getDownload() { - getDownloadPreVersion24(); - getDownloadVersion24(); + private void getDownload(long ingestJobId) { + getDownloadPreVersion24(ingestJobId); + getDownloadVersion24(ingestJobId); } /** * Finds downloads artifacts from Firefox data from versions before 24.0. * * Downloads were stored in a separate downloads database. + * @param ingestJobId The ingest job id. */ - private void getDownloadPreVersion24() { + private void getDownloadPreVersion24(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List downloadsFiles; @@ -501,7 +514,7 @@ class Firefox extends Extract { continue; } String fileName = downloadsFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox", ingestJobId) + File.separator + fileName + j + ".db"; //NON-NLS int errors = 0; try { ContentUtils.writeToFile(downloadsFile, new File(temps), context::dataSourceIngestIsCancelled); @@ -526,7 +539,7 @@ class Firefox extends Extract { } List> tempList = this.dbConnect(temps, DOWNLOAD_QUERY); - logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { if (context.dataSourceIngestIsCancelled()) { @@ -574,27 +587,20 @@ class Firefox extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } - - BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); - if 
(webDownloadArtifact != null) { + try { + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); bbartifacts.add(webDownloadArtifact); // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. - try { - for (AbstractFile downloadedFile : fileManager.findFiles(dataSource, FilenameUtils.getName(downloadedFilePath), FilenameUtils.getPath(downloadedFilePath))) { - BlackboardArtifact associatedObjectArtifact = downloadedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); - associatedObjectArtifact.addAttribute( - new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, - RecentActivityExtracterModuleFactory.getModuleName(), webDownloadArtifact.getArtifactID())); - - bbartifacts.add(associatedObjectArtifact); - break; - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Error creating associated object artifact for file '%s'", - downloadedFilePath), ex); //NON-NLS + for (AbstractFile downloadedFile : fileManager.findFiles(dataSource, FilenameUtils.getName(downloadedFilePath), FilenameUtils.getPath(downloadedFilePath))) { + bbartifacts.add(createAssociatedArtifact(downloadedFile, webDownloadArtifact)); + break; } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Error creating TSK_WEB_DOWNLOAD or TSK_ASSOCIATED_ARTIFACT artifact for file '%d'", + downloadsFile.getId()), ex); //NON-NLS } + } if (errors > 0) { this.addErrorMessage( @@ -603,7 +609,6 @@ class Firefox extends Extract { } j++; dbFile.delete(); - break; } if(!context.dataSourceIngestIsCancelled()) { @@ -615,8 +620,9 @@ class Firefox extends Extract { * Gets download artifacts from Firefox data from version 24. * * Downloads are stored in the places database. + * @param ingestJobId The ingest job id. 
*/ - private void getDownloadVersion24() { + private void getDownloadVersion24(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List downloadsFiles; try { @@ -641,7 +647,7 @@ class Firefox extends Extract { continue; } String fileName = downloadsFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + "-downloads" + j + ".db"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox", ingestJobId) + File.separator + fileName + "-downloads" + j + ".db"; //NON-NLS int errors = 0; try { ContentUtils.writeToFile(downloadsFile, new File(temps), context::dataSourceIngestIsCancelled); @@ -668,7 +674,7 @@ class Firefox extends Extract { List> tempList = this.dbConnect(temps, DOWNLOAD_QUERY_V24); - logger.log(Level.INFO, "{0} - Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0} - Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { if (context.dataSourceIngestIsCancelled()) { @@ -716,26 +722,19 @@ class Firefox extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } - - BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); - if (webDownloadArtifact != null) { + try { + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); bbartifacts.add(webDownloadArtifact); - + // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. 
- try { - for (AbstractFile downloadedFile : fileManager.findFiles(dataSource, FilenameUtils.getName(downloadedFilePath), FilenameUtils.getPath(downloadedFilePath))) { - BlackboardArtifact associatedObjectArtifact = downloadedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); - associatedObjectArtifact.addAttribute( - new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, - RecentActivityExtracterModuleFactory.getModuleName(), webDownloadArtifact.getArtifactID())); - bbartifacts.add(associatedObjectArtifact); - break; - } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Error creating associated object artifact for file '%s'", - downloadedFilePath), ex); //NON-NLS + for (AbstractFile downloadedFile : fileManager.findFiles(dataSource, FilenameUtils.getName(downloadedFilePath), FilenameUtils.getPath(downloadedFilePath))) { + bbartifacts.add(createAssociatedArtifact(downloadedFile, webDownloadArtifact)); + break; } - } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Error creating associated object artifact for file '%s'", + downloadedFilePath), ex); //NON-NLS + } } if (errors > 0) { this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Firefox.getDlV24.errMsg.errParsingArtifacts", @@ -743,7 +742,6 @@ class Firefox extends Extract { } j++; dbFile.delete(); - break; } if(!context.dataSourceIngestIsCancelled()) { @@ -754,8 +752,9 @@ class Firefox extends Extract { /** * Gets data from formshistory.sqlite database. * Parses and creates artifacts. + * @param ingestJobId The ingest job id. 
*/ - private void getFormsHistory() { + private void getFormsHistory(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List formHistoryFiles; @@ -789,7 +788,7 @@ class Firefox extends Extract { } String fileName = formHistoryFile.getName(); - String tempFilePath = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; //NON-NLS + String tempFilePath = RAImageIngestModule.getRATempPath(currentCase, "firefox", ingestJobId) + File.separator + fileName + j + ".db"; //NON-NLS try { ContentUtils.writeToFile(formHistoryFile, new File(tempFilePath), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -818,7 +817,7 @@ class Firefox extends Extract { String formHistoryQuery = (isFirefoxV64) ? FORMHISTORY_QUERY_V64 : FORMHISTORY_QUERY; List> tempList = this.dbConnect(tempFilePath, formHistoryQuery); - logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{moduleName, tempFilePath, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{getName(), tempFilePath, tempList.size()}); //NON-NLS for (HashMap result : tempList) { if (context.dataSourceIngestIsCancelled()) { @@ -856,10 +855,11 @@ class Firefox extends Extract { (Integer.valueOf(result.get("timesUsed").toString())))); //NON-NLS } - // Add artifact - BlackboardArtifact bbart = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, formHistoryFile, bbattributes); - if (bbart != null) { - bbartifacts.add(bbart); + try { + // Add artifact + bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, formHistoryFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_WEB_FORM_AUTOFILL artifact for file %d", formHistoryFile.getId()), ex); } } ++j; @@ -875,9 +875,9 @@ class Firefox 
extends Extract { /** * Gets data from autofill-profiles.json file. * Parses file and makes artifacts. - * + * @param ingestJobId The ingest job id. */ - private void getAutofillProfiles() { + private void getAutofillProfiles(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List autofillProfilesFiles; try { @@ -902,7 +902,7 @@ class Firefox extends Extract { if (profileFile.getSize() == 0) { continue; } - String temps = RAImageIngestModule.getRATempPath(currentCase, "Firefox") + File.separator + profileFile.getName() + j + ".json"; //NON-NLS + String temps = RAImageIngestModule.getRATempPath(currentCase, "Firefox", ingestJobId) + File.separator + profileFile.getName() + j + ".json"; //NON-NLS try { ContentUtils.writeToFile(profileFile, new File(temps), context::dataSourceIngestIsCancelled); } catch (ReadContentInputStreamException ex) { @@ -919,7 +919,7 @@ class Firefox extends Extract { continue; } - logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{moduleName, temps}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getName(), temps}); //NON-NLS File dbFile = new File(temps); if (context.dataSourceIngestIsCancelled()) { dbFile.delete(); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 54474ab677..a18ced587e 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -23,6 +23,7 @@ package org.sleuthkit.autopsy.recentactivity; import java.io.File; +import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; @@ -31,6 +32,7 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import 
org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.TimeStampUtils; import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -39,18 +41,23 @@ import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; import org.sleuthkit.datamodel.Content; import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult; import org.sleuthkit.autopsy.ingest.IngestJobContext; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.SleuthkitCase; /** * Recent activity image ingest module */ public final class RAImageIngestModule implements DataSourceIngestModule { + private static final String RECENT_ACTIVITY_FOLDER = "RecentActivity"; private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName()); private final List extractors = new ArrayList<>(); private final List browserExtractors = new ArrayList<>(); private final IngestServices services = IngestServices.getInstance(); private IngestJobContext context; private final StringBuilder subCompleted = new StringBuilder(); + protected SleuthkitCase tskCase; + private RAOsAccountCache accountCache = new RAOsAccountCache(); RAImageIngestModule() { } @@ -59,15 +66,10 @@ public final class RAImageIngestModule implements DataSourceIngestModule { public void startUp(IngestJobContext context) throws IngestModuleException { this.context = context; - Extract iexplore; - Extract edge; - try { - iexplore = new ExtractIE(); - edge = new ExtractEdge(); - } catch (NoCurrentCaseException ex) { - throw new IngestModuleException(ex.getMessage(), ex); - } + tskCase = Case.getCurrentCase().getSleuthkitCase(); + Extract iexplore = new ExtractIE(); + Extract edge = new ExtractEdge(); Extract registry = new ExtractRegistry(); Extract recentDocuments = new RecentDocumentsByLnk(); Extract chrome = 
new Chromium(); @@ -83,23 +85,23 @@ public final class RAImageIngestModule implements DataSourceIngestModule { Extract webAccountType = new ExtractWebAccountType(); Extract messageDomainType = new DomainCategoryRunner(); + extractors.add(recentDocuments); + extractors.add(registry); // needs to run before the DataSourceUsageAnalyzer + extractors.add(osExtract); // this needs to run before the DataSourceUsageAnalyzer + extractors.add(dataSourceAnalyzer); //this needs to run after ExtractRegistry and ExtractOs extractors.add(chrome); extractors.add(firefox); extractors.add(iexplore); extractors.add(edge); extractors.add(safari); - extractors.add(recentDocuments); extractors.add(SEUQA); // this needs to run after the web browser modules extractors.add(webAccountType); // this needs to run after the web browser modules - extractors.add(registry); // this should run after quicker modules like the browser modules and needs to run before the DataSourceUsageAnalyzer - extractors.add(osExtract); // this needs to run before the DataSourceUsageAnalyzer - extractors.add(dataSourceAnalyzer); //this needs to run after ExtractRegistry and ExtractOs extractors.add(zoneInfo); // this needs to run after the web browser modules extractors.add(recycleBin); // this needs to run after ExtractRegistry and ExtractOS - extractors.add(sru); + extractors.add(sru); extractors.add(prefetch); extractors.add(messageDomainType); - + browserExtractors.add(chrome); browserExtractors.add(firefox); browserExtractors.add(iexplore); @@ -132,7 +134,10 @@ public final class RAImageIngestModule implements DataSourceIngestModule { progressBar.progress(extracter.getName(), i); try { - extracter.process(dataSource, context, progressBar); + extracter.process(dataSource, context, progressBar, accountCache); + if (extracter instanceof ExtractRegistry) { + accountCache.initialize(tskCase, ((DataSource) dataSource).getHost()); + } } catch (Exception ex) { logger.log(Level.SEVERE, "Exception occurred in " + 
extracter.getName(), ex); //NON-NLS subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModFailed", @@ -210,23 +215,39 @@ public final class RAImageIngestModule implements DataSourceIngestModule { return ProcessResult.OK; } + /** + * Makes a path of the format + * [basePath]/[RECENT_ACTIVITY_FOLDER]/[module]_[ingest job id] if it does not + * already exist and returns the created folder. + * + * @param basePath The base path (a case-related folder like temp or + * output). + * @param module The module name to include in the folder name. + * @param ingestJobId The id of the ingest job. + * @return The path to the folder. + */ + private static String getAndMakeRAPath(String basePath, String module, long ingestJobId) { + String moduleFolder = String.format("%s_%d", module, ingestJobId); + Path tmpPath = Paths.get(basePath, RECENT_ACTIVITY_FOLDER, moduleFolder); + File dir = tmpPath.toFile(); + if (dir.exists() == false) { + dir.mkdirs(); + } + return tmpPath.toString(); + } + /** * Get the temp path for a specific sub-module in recent activity. Will * create the dir if it doesn't exist. 
* * @param a_case Case that directory is for - * @param mod Module name that will be used for a sub folder in the temp - * folder to prevent name collisions + * @param mod Module name that will be used for a sub folder in the temp + * folder to prevent name collisions * * @return Path to directory */ - protected static String getRATempPath(Case a_case, String mod) { - String tmpDir = a_case.getTempDirectory() + File.separator + "RecentActivity" + File.separator + mod; //NON-NLS - File dir = new File(tmpDir); - if (dir.exists() == false) { - dir.mkdirs(); - } - return tmpDir; + static String getRATempPath(Case a_case, String mod, long ingestJobId) { + return getAndMakeRAPath(a_case.getTempDirectory(), mod, ingestJobId); } /** @@ -234,28 +255,24 @@ public final class RAImageIngestModule implements DataSourceIngestModule { * create the dir if it doesn't exist. * * @param a_case Case that directory is for - * @param mod Module name that will be used for a sub folder in the temp - * folder to prevent name collisions + * @param mod Module name that will be used for a sub folder in the temp + * folder to prevent name collisions * * @return Path to directory */ - protected static String getRAOutputPath(Case a_case, String mod) { - String tmpDir = a_case.getModuleDirectory() + File.separator + "RecentActivity" + File.separator + mod; //NON-NLS - File dir = new File(tmpDir); - if (dir.exists() == false) { - dir.mkdirs(); - } - return tmpDir; + static String getRAOutputPath(Case a_case, String mod, long ingestJobId) { + return getAndMakeRAPath(a_case.getModuleDirectory(), mod, ingestJobId); } - + /** * Get relative path for module output folder. * * @throws NoCurrentCaseException if there is no open case. 
* @return the relative path of the module output folder */ - static String getRelModuleOutputPath() throws NoCurrentCaseException { - return Paths.get(Case.getCurrentCaseThrows().getModuleOutputDirectoryRelativePath(), - "RecentActivity").normalize().toString() ; //NON-NLS + static String getRelModuleOutputPath(Case autCase, String mod, long ingestJobId) { + return Paths.get(getAndMakeRAPath(autCase.getModuleOutputDirectoryRelativePath(), mod, ingestJobId)) + .normalize() + .toString(); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAOsAccountCache.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAOsAccountCache.java new file mode 100755 index 0000000000..c1206c82f6 --- /dev/null +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAOsAccountCache.java @@ -0,0 +1,131 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.recentactivity; + +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.Host; +import org.sleuthkit.datamodel.OsAccount; +import org.sleuthkit.datamodel.OsAccount.OsAccountAttribute; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Cache of OsAccounts for a given host to be used by the various Recent + * Activity Extractors. + * + */ +final class RAOsAccountCache { + + private final Map accountCache = new HashMap<>(); + + /** + * initialize the account map for the given host. This should be done after + * the ExtractRegistry is run. + * + * @param tskCase + * @param host + * + * @throws TskCoreException + */ + void initialize(SleuthkitCase tskCase, Host host) throws TskCoreException { + buildAccountMap(tskCase, host); + } + + /** + * Returns the appropriate OsAccount for the given file. + * + * If the file is not associated with an OsAccount, try to find one based on + * the location of the file. + * + * If the file is associated with the system account of S-1-5-32-544 use the + * file path to determine which user account to associate the file with. + * + * + * @param file The file to match with appropriate OsAccount. + * + * @return Optional OsAccount, may not be present if one is not found. 
+ * + * @throws TskCoreException + */ + Optional getOsAccount(AbstractFile file) throws TskCoreException { + Optional optional = file.getOsAccountObjectId(); + + if (!optional.isPresent()) { + return getAccountForPath(file.getParentPath()); + } + + OsAccount osAccount = Case.getCurrentCase().getSleuthkitCase().getOsAccountManager().getOsAccountByObjectId(optional.get()); + if (osAccount.getName().equals("S-1-5-32-544")) { + return getAccountForPath(file.getParentPath()); + } + + return Optional.ofNullable(osAccount); + } + + /** + * Return a user account if the given path's parent directory is a user + * account home directory. + * + * @param path Path to search. + * + * @return An Optional OsAccount if one was found. + */ + private Optional getAccountForPath(String path) { + Path filePath = Paths.get(path.toLowerCase()); + // Check if the path might be a user path. + if (filePath.startsWith(Paths.get("/users")) || filePath.startsWith("/document and settings")) { + for (String key : accountCache.keySet()) { + if (filePath.startsWith(Paths.get(key))) { + return Optional.of(accountCache.get(key)); + } + } + } + return Optional.empty(); + } + + /** + * Build a map of user home directories to OsAccounts for the given host. 
+ * + * @throws TskCoreException + */ + private void buildAccountMap(SleuthkitCase tskCase, Host host) throws TskCoreException { + BlackboardAttribute.Type homeDir = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HOME_DIR); + List accounts = tskCase.getOsAccountManager().getOsAccounts(host); + + for (OsAccount account : accounts) { + List attributeList = account.getExtendedOsAccountAttributes(); + + for (OsAccountAttribute attribute : attributeList) { + if (attribute.getHostId().isPresent() + && attribute.getHostId().get().equals(host.getHostId()) + && attribute.getAttributeType().equals(homeDir)) { + accountCache.put(attribute.getValueString(), account); + } + } + } + } +} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java index 32341727cb..b0c119ae25 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2012-2014 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * * Copyright 2012 42six Solutions. 
* Contact: aebadirad 42six com @@ -23,7 +23,6 @@ package org.sleuthkit.autopsy.recentactivity; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.logging.Level; import org.apache.commons.io.FilenameUtils; @@ -44,8 +43,6 @@ import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.ReadContentInputStream; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT; import org.sleuthkit.datamodel.TskData; /** @@ -126,13 +123,17 @@ class RecentDocumentsByLnk extends Extract { NbBundle.getMessage(this.getClass(), "RecentDocumentsByLnk.parentModuleName.noSpace"), recentFile.getCrtime())); - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes); - if(bba != null) { - bbartifacts.add(bba); - bba = createAssociatedArtifact(path, bba); - if (bba != null) { + try{ + BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes); + if(bba != null) { bbartifacts.add(bba); + bba = createAssociatedArtifact(path, bba); + if (bba != null) { + bbartifacts.add(bba); + } } + } catch(TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", recentFile.getId()), ex); } } @@ -160,19 +161,11 @@ class RecentDocumentsByLnk extends Extract { sourceFiles = fileManager.findFiles(dataSource, fileName, filePath); //NON-NLS for (AbstractFile sourceFile : sourceFiles) { if (sourceFile.getParentPath().endsWith(filePath)) { - Collection bbattributes2 = new ArrayList<>(); - bbattributes2.addAll(Arrays.asList( - new BlackboardAttribute(TSK_ASSOCIATED_ARTIFACT, this.getName(), - bba.getArtifactID()))); - - BlackboardArtifact associatedObjectBba = 
createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, sourceFile, bbattributes2); - if (associatedObjectBba != null) { - return associatedObjectBba; - } + return createAssociatedArtifact(sourceFile, bba); } } } catch (TskCoreException ex) { - logger.log(Level.WARNING, String.format("Error finding actual file %s. file may not exist", filePathName)); //NON-NLS + logger.log(Level.WARNING, String.format("Error finding actual file %s. file may not exist", filePathName), ex); //NON-NLS } return null; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index 4166120823..b57717e648 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -80,7 +80,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { private IngestJobContext context; SearchEngineURLQueryAnalyzer() { - moduleName = NbBundle.getMessage(ExtractIE.class, "SearchEngineURLQueryAnalyzer.moduleName.text"); + super(NbBundle.getMessage(ExtractIE.class, "SearchEngineURLQueryAnalyzer.moduleName.text")); } /** diff --git a/apidiff.py b/apidiff.py new file mode 100644 index 0000000000..3b69d6f59a --- /dev/null +++ b/apidiff.py @@ -0,0 +1,295 @@ +""" +Generates an api diff from one commit to another. This script relies on gitpython and similarly require git +installed on the system. This script also requires python 3. + +This script can be called as follows: + +python apidiff.py -r -o + +If the '-o' flag is not specified, this script will create a folder at apidiff_output in the same directory as the +script. 
For full list of options call: + +python apidiff.py -h +""" + +import os +import subprocess +import sys +import time +from pathlib import Path +from typing import Tuple, Iterator, List + +import argparse as argparse +from git import Repo, Blob, Tree + +""" +These are exit codes for jdiff: +return code 1 = error in jdiff +return code 100 = no changes +return code 101 = compatible changes +return code 102 = incompatible changes +""" +NO_CHANGES = 100 +COMPATIBLE = 101 +NON_COMPATIBLE = 102 +ERROR = 1 + + +def compare_xml(jdiff_path: str, root_dir: str, output_folder: str, oldapi_folder: str, + newapi_folder: str, api_file_name: str, log_path: str) -> int: + """ + Compares xml generated by jdiff using jdiff. + :param jdiff_path: Path to jdiff jar. + :param root_dir: directory for output . + :param output_folder: Folder for diff output. + :param oldapi_folder: Folder name of old api (i.e. release-4.10.2). + :param newapi_folder: Folder name of new api (i.e. release-4.10.2). + :param api_file_name: Name of xml file name (i.e. if output.xml, just 'output') + :param log_path: Path to log file. + :return: jdiff exit code. 
+ """ + jdiff_parent = os.path.dirname(jdiff_path) + + null_file = fix_path(os.path.join(jdiff_parent, "lib", "Null.java")) + + # comments are expected in a specific place + make_dir(os.path.join(root_dir, + output_folder, + f"user_comments_for_{oldapi_folder}", + f"{api_file_name}_to_{newapi_folder}")) + + log = open(log_path, "w") + cmd = ["javadoc", + "-doclet", "jdiff.JDiff", + "-docletpath", fix_path(jdiff_path), + "-d", fix_path(output_folder), + "-oldapi", fix_path(os.path.join(oldapi_folder, api_file_name)), + "-newapi", fix_path(os.path.join(newapi_folder, api_file_name)), + "-script", + null_file] + + code = None + try: + jdiff = subprocess.Popen(cmd, stdout=log, stderr=log, cwd=root_dir) + jdiff.wait() + code = jdiff.returncode + except Exception as e: + log_and_print(log, f"Error executing javadoc: {str(e)}\nExiting...") + exit(1) + log.close() + + print(f"Compared XML for {oldapi_folder} {newapi_folder}") + if code == NO_CHANGES: + print(" No API changes") + elif code == COMPATIBLE: + print(" API Changes are backwards compatible") + elif code == NON_COMPATIBLE: + print(" API Changes are not backwards compatible") + else: + print(" *Error in XML, most likely an empty module") + sys.stdout.flush() + return code + + +def gen_xml(jdiff_path: str, output_path: str, log_output_path: str, src: str, packages: List[str]): + """ + Uses jdiff to generate an xml representation of the source code. + :param jdiff_path: Path to jdiff jar. + :param output_path: Path to output path of diff. + :param log_output_path: The log output path. + :param src: The path to the source code. + :param packages: The packages to process. 
+ """ + make_dir(output_path) + + log = open_log_file(log_output_path) + log_and_print(log, f"Generating XML for: {src} outputting to: {output_path}") + cmd = ["javadoc", + "-doclet", "jdiff.JDiff", + "-docletpath", fix_path(jdiff_path), + "-apiname", fix_path(output_path), + "-sourcepath", fix_path(src)] + cmd = cmd + packages + try: + jdiff = subprocess.Popen(cmd, stdout=log, stderr=log) + jdiff.wait() + except Exception as e: + log_and_print(log, f"Error executing javadoc {str(e)}\nExiting...") + exit(1) + + log_and_print(log, f"Generated XML for: " + str(packages)) + log.close() + sys.stdout.flush() + + +def _list_paths(root_tree: Tree, src_folder, path: Path = None) -> Iterator[Tuple[str, Blob]]: + """ + Given the root path to serve as a prefix, walks the tree of a git commit returning all files and blobs. + Repurposed from: https://www.enricozini.org/blog/2019/debian/gitpython-list-all-files-in-a-git-commit/ + Args: + root_tree: The tree of the commit to walk. + src_folder: relative path in repo to source folder that will be copied. + path: The path to use as a prefix. + Returns: A tuple iterator where each tuple consists of the path as a string and a blob of the file. + """ + for blob in root_tree.blobs: + next_path = Path(path) / blob.name if path else blob.name + if Path(src_folder) in Path(next_path).parents: + ret_item = (next_path, blob) + yield ret_item + for tree in root_tree.trees: + next_path = Path(path) / tree.name if path else tree.name + yield from _list_paths(tree, src_folder, next_path) + + +def _get_tree(repo_path: str, commit_id: str) -> Tree: + """ + Retrieves the git tree that can be walked for files and file content at the specified commit. + Args: + repo_path: The path to the repo or a child directory of the repo. + commit_id: The commit id. + Returns: The tree. 
+ """ + repo = Repo(repo_path, search_parent_directories=True) + commit = repo.commit(commit_id.strip()) + return commit.tree + + +def copy_commit_paths(repo_path, commit_id, src_folder, output_folder): + """ + Copies all files located within a repo in the folder 'src_folder' to 'output_folder'. + :param repo_path: The path to the repo. + :param commit_id: The commit id. + :param src_folder: The relative path in the repo to the source folder. + :param output_folder: The output folder where the source will be copied. + """ + tree = _get_tree(repo_path, commit_id) + for rel_path, blob in _list_paths(tree, src_folder): + output_path = os.path.join(output_folder, os.path.relpath(rel_path, src_folder)) + parent_folder = os.path.dirname(output_path) + make_dir(parent_folder) + output_file = open(output_path, 'w') + output_file.write(blob.data_stream.read().decode('utf-8')) + output_file.close() + + +def open_log_file(log_path): + """ + Opens a path to a lof file for appending. Creating directories and log file as necessary. + :param log_path: The path to the log file. + :return: The log file opened for writing. + """ + if not os.path.exists(log_path): + make_dir(os.path.dirname(log_path)) + Path(log_path).touch() + + return open(log_path, 'a+') + + +def fix_path(path): + """ + Generates a path that is escaped from cygwin paths if present. + :param path: Path (possibly including cygdrive). + :return: The normalized path. + """ + if "cygdrive" in path: + new_path = path[11:] + return "C:/" + new_path + else: + return path + + +def log_and_print(log, message): + """ + Creates a log entry and prints to stdout. + :param log: The log file object. + :param message: The string to be printed. + """ + time_stamp = time.strftime('%Y-%m-%d %H:%M:%S') + print(f"{time_stamp}: {message}") + log.write(f"{time_stamp}: {message}\n") + + +def make_dir(dir_path: str): + """ + Create the given directory, if it doesn't already exist. + :param dir_path: The path to the directory. 
+ :return: True if created. + """ + try: + if not os.path.isdir(dir_path): + os.makedirs(dir_path) + if os.path.isdir(dir_path): + return True + return False + except IOError: + print("Exception thrown when creating directory: " + dir_path) + return False + + +def run_compare(output_path: str, jdiff_path: str, repo_path: str, src_rel_path: str, prev_commit_id: str, + latest_commit_id: str, packages: List[str]): + """ + Runs a comparison of the api between two different commits/branches/tags of the same repo generating a jdiff diff. + :param output_path: The output path for artifacts. + :param jdiff_path: The path to the jdiff jar. + :param repo_path: The path to the repo. + :param src_rel_path: The relative path in the repo to the source directory. + :param prev_commit_id: The previous commit/branch/tag id. + :param latest_commit_id: The latest commit/branch/tag id. + :param packages: The packages to be considered for the api diff. + """ + log_path = os.path.join(output_path, "messages.log") + output_file_name = "output" + diff_dir = "diff" + src_folder = "src" + + for commit_id in [prev_commit_id, latest_commit_id]: + src_copy = os.path.join(output_path, src_folder, commit_id) + copy_commit_paths(repo_path, commit_id, src_rel_path, src_copy) + gen_xml(jdiff_path, os.path.join(output_path, commit_id, output_file_name), log_path, src_copy, packages) + + # compare the two + compare_xml(jdiff_path, output_path, os.path.join(output_path, diff_dir), + prev_commit_id, latest_commit_id, output_file_name, log_path) + + +def main(): + parser = argparse.ArgumentParser(description="Generates a jdiff diff of the java api between two commits in a " + "repo.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument(dest='prev_commit', type=str, help=r'The git commit id/branch/tag to be used for the first ' + r'commit') + parser.add_argument(dest='latest_commit', type=str, help=r'The git commit id/branch/tag to be used for the latest ' + r'commit') + 
parser.add_argument('-r', '--repo', dest='repo_path', type=str, required=True, + help='The path to the repo. If not specified, path of script is used.') + + parser.add_argument('-o', '--output', dest='output_path', type=str, required=False, + help='The location for output of all artifacts. Defaults to an output folder in same directory' + 'as script') + parser.add_argument('-s', '--src', dest='src_rel_folder', type=str, required=False, default="bindings/java/src", + help='The relative path within the repo of the src folder.') + # list of packages can be specified like this: + # https://stackoverflow.com/questions/15753701/how-can-i-pass-a-list-as-a-command-line-argument-with-argparse + parser.add_argument('-p', '--packages', dest='packages', nargs='+', required=False, + default=["org.sleuthkit.datamodel"], help='The packages to consider in api diff.') + parser.add_argument('-j', '--jdiff', dest='jdiff_path', type=str, required=False, + help='The packages to consider in api diff.') + + args = parser.parse_args() + script_path = os.path.dirname(os.path.realpath(__file__)) + repo_path = args.repo_path if args.repo_path else script_path + output_path = args.output_path if args.output_path else os.path.join(script_path, "apidiff_output") + jdiff_path = args.jdiff_path if args.jdiff_path else os.path.join(script_path, + "thirdparty/jdiff/v-custom/jdiff.jar") + run_compare(output_path=output_path, + jdiff_path=jdiff_path, + repo_path=repo_path, + packages=args.packages, + src_rel_path=args.src_rel_folder, + prev_commit_id=args.prev_commit, + latest_commit_id=args.latest_commit) + + +main() diff --git a/build-windows-installer.xml b/build-windows-installer.xml index e07d110568..5bf9bdb67a 100644 --- a/build-windows-installer.xml +++ b/build-windows-installer.xml @@ -173,7 +173,7 @@ - + diff --git a/docs/doxygen-user/configuration.dox b/docs/doxygen-user/configuration.dox index e3ef07a2ac..271e359207 100644 --- a/docs/doxygen-user/configuration.dox +++ 
b/docs/doxygen-user/configuration.dox @@ -14,11 +14,16 @@ The first tab on the options panel is for general application settings. \image html options_application.png -The top section lets you adjust how much memory is used by Autopsy and how many log files to keep. Generally each Autopsy session generates one log file, though it can generate more if the log file becomes too large. +The top section lets you adjust how much memory is used by Autopsy and how many log files to keep. Generally each Autopsy session generates one log file, though it can generate more if the log file becomes too large. You can also specify a custom location to write heap dumps to. -The next section lets you specify where Autopsy should store temporary files. These files will be deleted when a case is closed. +The next section lets you specify where Autopsy should store temporary files. These files will be deleted when a case is closed. There are three options: +
    +
  • Local temp directory - Uses the system temp folder (On Windows, typically C:\\Users\\(user name)\\AppData\\Local\\Temp\\Autopsy) +
  • Temp folder in case directory - Puts temporary files in the "temp" directory in the case folder +
  • Custom - Will use the given folder as a base for the temporary files +
-The final section lets you set a custom logo. +The next section lets you set a custom logo. \image html options_logo.png @@ -26,6 +31,8 @@ This logo will be displayed in any generated \ref report_html "HTML reports". \image html options_logo_report.jpg +The final section lists instructions on how to change scaling for high DPI Windows systems. + \section config_view View Options See the \ref view_options_page page for a description of how you can customize what data is displayed in Autopsy. diff --git a/docs/doxygen-user/content_viewer.dox b/docs/doxygen-user/content_viewer.dox index 8a717e9c2d..be5d5b5a33 100644 --- a/docs/doxygen-user/content_viewer.dox +++ b/docs/doxygen-user/content_viewer.dox @@ -75,23 +75,17 @@ Registry hive files can be viewed in a format similar to a registry editor. \image html content_viewer_registry.png -\section cv_message Message - -The Message tab shows details of emails and SMS messages. - -\image html content_viewer_message.png - \section cv_metadata File Metadata The File Metadata tab displays basic information about the file, such as type, size, and hash. It also displays the output of the Sleuth Kit istat tool. \image html content_viewer_metadata.png -\section cv_context Context +\section cv_os_account OS Accounts -The Context tab shows information on where a file came from and allows you to navigate to the original result. For example, it can show the the URL for downloaded files and the email message a file was attached to. In the image below you can see the context for an image that was sent as an email attachment. +The OS Accounts tab displays information on the OS account associated with a given result, if present. It is also used to give details on accounts listed under the OS Accounts node in the tree. -\image html content_viewer_context.png +\image html content_viewer_os_account.png \section cv_results Results @@ -101,6 +95,12 @@ The Results tab is active when selecting items with associated results such as k
\image html content_viewer_results_bookmark.png +\section cv_context Context + +The Context tab shows information on where a file came from and allows you to navigate to the original result. For example, it can show the URL for downloaded files and the email message a file was attached to. In the image below you can see the context for an image that was sent as an email attachment. + +\image html content_viewer_context.png + \section cv_annotations Annotations The Annotations tab shows information added by an analyst about a file or result. It displays any tags and comments associated with the file or result, and if the \ref central_repo_page is enabled it will also display any comments saved to the Central Repository. diff --git a/docs/doxygen-user/data_source_summary.dox b/docs/doxygen-user/data_source_summary.dox index 459dd7f611..01befa9a2b 100644 --- a/docs/doxygen-user/data_source_summary.dox +++ b/docs/doxygen-user/data_source_summary.dox @@ -80,4 +80,10 @@ The Container tab displays information on the data source itself, such as the si \image html ds_summary_container.png +\subsection ds_summary_export Export + +The Export tab allows you to export the contents of the other data source summary tabs to an Excel-formatted file. + +\image html ds_summary_export.png + */ \ No newline at end of file diff --git a/docs/doxygen-user/data_sources.dox b/docs/doxygen-user/data_sources.dox index f8aefc63c8..81336e74ab 100644 --- a/docs/doxygen-user/data_sources.dox +++ b/docs/doxygen-user/data_sources.dox @@ -25,6 +25,18 @@ The data source must remain accessible for the duration of the analysis because Regardless of the type of data source, there are some common steps in the process:
    + +
  1. You will choose the host for the data source you are going to add. See the \ref host_page "hosts page" for more information about hosts. + +\image html data_source_host_select.png + +There are three options: +
      +
    • Generate new host based on data source name - this will typically create a host with a name similar to your data source with the ID used in the database appended for uniqueness. +
    • Specify new host name - this allows you to enter a host name. +
    • Use existing host - this allows you to choose a host name already in use in the current case. +
    +
  2. You will select the type of data source. \image html select-data-source-type.PNG diff --git a/docs/doxygen-user/file_discovery.dox b/docs/doxygen-user/file_discovery.dox index 08ff568d9e..fdf700a758 100644 --- a/docs/doxygen-user/file_discovery.dox +++ b/docs/doxygen-user/file_discovery.dox @@ -13,7 +13,7 @@ We suggest running all \ref ingest_page "ingest modules" before launching discov Required ingest modules:
    • \ref file_type_identification_page for image, video, and document searches -
    • \ref recent_activity_page or \ref ileapp_page for domain searches +
    • \ref recent_activity_page or one of the mobile parsers (\ref android_analyzer_page, \ref ileapp_page, \ref aleapp_page) for domain searches
    Optional ingest modules: @@ -50,7 +50,7 @@ The first step is choosing whether you want to display images, videos, documents \subsection file_disc_filtering Filtering -The second step is to select and configure your filters. The available filters will vary depending on the result type. For most filters, you enable them using the checkbox on the left and then select your options. Multiple options can be selected by using CTRL + left click. Results must pass all enabled filters to be displayed. +The second step is to select and configure your filters. The available filters will vary depending on the result type. For most filters, you enable them using the checkbox on the left and then select the checkboxes next to the options you want to be enabled. The "Check All" and "Uncheck All" buttons can be used to check or uncheck all options in the list. Results must pass all enabled filters to be displayed. \subsubsection file_disc_size_filter File Size Filter @@ -132,7 +132,7 @@ The previously notable filter is for domain searches only and is used to restric \subsubsection file_disc_known_account_filter Known Account Type Filter -The previously notable filter is for domain searches only and is used to restrict results to only those domains that have a known account type. +The known account type filter is for domain searches only and is used to restrict results to only those domains that have a known account type. \image html FileDiscovery/fd_knownAccountFilter.png diff --git a/docs/doxygen-user/hosts.dox b/docs/doxygen-user/hosts.dox new file mode 100644 index 0000000000..1b3ff6f4e2 --- /dev/null +++ b/docs/doxygen-user/hosts.dox @@ -0,0 +1,50 @@ +/*! \page host_page Hosts + + +[TOC] + +\section host_use Using Hosts + +\subsection host_wizard Associating a Data Source With a Host + +Every data source must be associated with a host. The first step in the \ref ds_add "add data source process" is to select a host for the data source you are about to add to the case. 
This host can be auto-generated, entered by the user, or selected from the list of hosts already present in the case. + +\image html data_source_host_select.png + +\subsection host_view Viewing Hosts + +Hosts are displayed in the \ref tree_viewer_page. Depending on the \ref view_options_page selected, hosts may be grouped together under persons. + +\image html ui_tree_top_ds.png + +\subsection host_os_accounts OS Accounts + +OS accounts can be viewed in the OS Accounts node under Results. Each OS account is associated with a host, and the host information is displayed in the OS Account tab of the content viewer. + +\image html host_os_accounts.png + +\section host_management Managing Hosts + +\subsection host_menu Manage Hosts Menu + +Go to Case->Manage Hosts to open the host management panel. + +\image html manage_hosts.png + +Here you can see all hosts in the case, add new hosts, change the name of an existing host, and delete hosts that are not in use. + +\subsection host_merge Merging Hosts + +Over the course of processing a case, it may become clear that two (or more) hosts should be combined. Merging one host into another will move all data sources from the source host into the destination host and move or combine any OS accounts found. + + +To merge hosts, right-click on the host you want to merge into another host. + +\image html host_merge.png + +A confirmation dialog will display stating that this can not be undone. After proceeding, the hosts will be merged together and the tree viewer node will update showing the combined data. 
+ +\image html host_merge_result.png + + +*/ \ No newline at end of file diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_analysis.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_analysis.png index a1e6bfb842..4232c8f434 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_analysis.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_analysis.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_container.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_container.png index 3d51efee73..328b233dc3 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_container.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_container.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_export.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_export.png new file mode 100644 index 0000000000..da7601f32e Binary files /dev/null and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_export.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_geo.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_geo.png index 4ab4440b55..7e49cac8c3 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_geo.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_geo.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_ingest.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_ingest.png index d3a1a970a9..6ed19b77c0 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_ingest.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_ingest.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_past_cases.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_past_cases.png index 3b47578b0e..4ac2623e6e 100644 Binary files 
a/docs/doxygen-user/images/DataSourceSummary/ds_summary_past_cases.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_past_cases.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_recent_files.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_recent_files.png index 3eeaeffd89..c9edf412ff 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_recent_files.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_recent_files.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_timeline.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_timeline.png index 1a376723d6..891802da42 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_timeline.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_timeline.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_types.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_types.png index 2bf93d3f90..b8dca4a984 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_types.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_types.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_user_activity.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_user_activity.png index 7b6d4d09f5..089345f85b 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_user_activity.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_user_activity.png differ diff --git a/docs/doxygen-user/images/DataSourceSummary/ds_summary_window.png b/docs/doxygen-user/images/DataSourceSummary/ds_summary_window.png index a9d61e9084..547f42fab8 100644 Binary files a/docs/doxygen-user/images/DataSourceSummary/ds_summary_window.png and b/docs/doxygen-user/images/DataSourceSummary/ds_summary_window.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_dataSourceFilter.png 
b/docs/doxygen-user/images/FileDiscovery/fd_dataSourceFilter.png index 9c42198839..fed4932bf0 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_dataSourceFilter.png and b/docs/doxygen-user/images/FileDiscovery/fd_dataSourceFilter.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_domainResultFilter.png b/docs/doxygen-user/images/FileDiscovery/fd_domainResultFilter.png index e1227105ee..34ad45439d 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_domainResultFilter.png and b/docs/doxygen-user/images/FileDiscovery/fd_domainResultFilter.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_domains.png b/docs/doxygen-user/images/FileDiscovery/fd_domains.png index f86e07251c..88df1ccb17 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_domains.png and b/docs/doxygen-user/images/FileDiscovery/fd_domains.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_fileSizeFilter.png b/docs/doxygen-user/images/FileDiscovery/fd_fileSizeFilter.png index 1e2bdb7f84..d352f941a4 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_fileSizeFilter.png and b/docs/doxygen-user/images/FileDiscovery/fd_fileSizeFilter.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_hashSetFilter.png b/docs/doxygen-user/images/FileDiscovery/fd_hashSetFilter.png index 8c05e72524..b8d9a1f3fd 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_hashSetFilter.png and b/docs/doxygen-user/images/FileDiscovery/fd_hashSetFilter.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_interestingItemsFilter.png b/docs/doxygen-user/images/FileDiscovery/fd_interestingItemsFilter.png index 4362a904d9..df3d443575 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_interestingItemsFilter.png and b/docs/doxygen-user/images/FileDiscovery/fd_interestingItemsFilter.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_objectFilter.png 
b/docs/doxygen-user/images/FileDiscovery/fd_objectFilter.png index 42efcee8fa..fae7f9a3ce 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_objectFilter.png and b/docs/doxygen-user/images/FileDiscovery/fd_objectFilter.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_pastOccur.png b/docs/doxygen-user/images/FileDiscovery/fd_pastOccur.png index 9b04f882c9..5f816cc4f8 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_pastOccur.png and b/docs/doxygen-user/images/FileDiscovery/fd_pastOccur.png differ diff --git a/docs/doxygen-user/images/FileDiscovery/fd_setup.png b/docs/doxygen-user/images/FileDiscovery/fd_setup.png index ee4fe27c1e..4b73514a9f 100644 Binary files a/docs/doxygen-user/images/FileDiscovery/fd_setup.png and b/docs/doxygen-user/images/FileDiscovery/fd_setup.png differ diff --git a/docs/doxygen-user/images/activeMQ_node_cleanup.png b/docs/doxygen-user/images/activeMQ_node_cleanup.png index 739d6e6ad7..3df88436b5 100644 Binary files a/docs/doxygen-user/images/activeMQ_node_cleanup.png and b/docs/doxygen-user/images/activeMQ_node_cleanup.png differ diff --git a/docs/doxygen-user/images/content_viewer_annotations.png b/docs/doxygen-user/images/content_viewer_annotations.png index aa37590379..445b96372e 100644 Binary files a/docs/doxygen-user/images/content_viewer_annotations.png and b/docs/doxygen-user/images/content_viewer_annotations.png differ diff --git a/docs/doxygen-user/images/content_viewer_app_image.png b/docs/doxygen-user/images/content_viewer_app_image.png index c3ba7a0052..50bfe7473d 100644 Binary files a/docs/doxygen-user/images/content_viewer_app_image.png and b/docs/doxygen-user/images/content_viewer_app_image.png differ diff --git a/docs/doxygen-user/images/content_viewer_app_plist.png b/docs/doxygen-user/images/content_viewer_app_plist.png index 815b5d2ed9..749a5d4f93 100644 Binary files a/docs/doxygen-user/images/content_viewer_app_plist.png and 
b/docs/doxygen-user/images/content_viewer_app_plist.png differ diff --git a/docs/doxygen-user/images/content_viewer_app_sqlite.png b/docs/doxygen-user/images/content_viewer_app_sqlite.png index 9bc708593f..b377395cf1 100644 Binary files a/docs/doxygen-user/images/content_viewer_app_sqlite.png and b/docs/doxygen-user/images/content_viewer_app_sqlite.png differ diff --git a/docs/doxygen-user/images/content_viewer_context.png b/docs/doxygen-user/images/content_viewer_context.png index 50953c6d56..758d1f2aed 100644 Binary files a/docs/doxygen-user/images/content_viewer_context.png and b/docs/doxygen-user/images/content_viewer_context.png differ diff --git a/docs/doxygen-user/images/content_viewer_hex.png b/docs/doxygen-user/images/content_viewer_hex.png index 0843548450..ec68e2a521 100644 Binary files a/docs/doxygen-user/images/content_viewer_hex.png and b/docs/doxygen-user/images/content_viewer_hex.png differ diff --git a/docs/doxygen-user/images/content_viewer_html.png b/docs/doxygen-user/images/content_viewer_html.png index 4a24941951..f42feba21d 100644 Binary files a/docs/doxygen-user/images/content_viewer_html.png and b/docs/doxygen-user/images/content_viewer_html.png differ diff --git a/docs/doxygen-user/images/content_viewer_indexed_text.png b/docs/doxygen-user/images/content_viewer_indexed_text.png index e25cbee5c5..11e4da475f 100644 Binary files a/docs/doxygen-user/images/content_viewer_indexed_text.png and b/docs/doxygen-user/images/content_viewer_indexed_text.png differ diff --git a/docs/doxygen-user/images/content_viewer_metadata.png b/docs/doxygen-user/images/content_viewer_metadata.png index fd38248776..67ed3e0186 100644 Binary files a/docs/doxygen-user/images/content_viewer_metadata.png and b/docs/doxygen-user/images/content_viewer_metadata.png differ diff --git a/docs/doxygen-user/images/content_viewer_os_account.png b/docs/doxygen-user/images/content_viewer_os_account.png new file mode 100644 index 0000000000..74bb53c366 Binary files /dev/null and 
b/docs/doxygen-user/images/content_viewer_os_account.png differ diff --git a/docs/doxygen-user/images/content_viewer_other_occurrences.png b/docs/doxygen-user/images/content_viewer_other_occurrences.png index c8a69e15d2..cb23535502 100644 Binary files a/docs/doxygen-user/images/content_viewer_other_occurrences.png and b/docs/doxygen-user/images/content_viewer_other_occurrences.png differ diff --git a/docs/doxygen-user/images/content_viewer_registry.png b/docs/doxygen-user/images/content_viewer_registry.png index ee45181c85..1d48467916 100644 Binary files a/docs/doxygen-user/images/content_viewer_registry.png and b/docs/doxygen-user/images/content_viewer_registry.png differ diff --git a/docs/doxygen-user/images/content_viewer_results_bookmark.png b/docs/doxygen-user/images/content_viewer_results_bookmark.png index 7dbc61bc7a..2c0d3cb736 100644 Binary files a/docs/doxygen-user/images/content_viewer_results_bookmark.png and b/docs/doxygen-user/images/content_viewer_results_bookmark.png differ diff --git a/docs/doxygen-user/images/content_viewer_results_call.png b/docs/doxygen-user/images/content_viewer_results_call.png index fa554e873a..3bc104af0a 100644 Binary files a/docs/doxygen-user/images/content_viewer_results_call.png and b/docs/doxygen-user/images/content_viewer_results_call.png differ diff --git a/docs/doxygen-user/images/content_viewer_strings_cyrillic.png b/docs/doxygen-user/images/content_viewer_strings_cyrillic.png index 0318a383d6..e9c1945e7f 100644 Binary files a/docs/doxygen-user/images/content_viewer_strings_cyrillic.png and b/docs/doxygen-user/images/content_viewer_strings_cyrillic.png differ diff --git a/docs/doxygen-user/images/content_viewer_strings_latin.png b/docs/doxygen-user/images/content_viewer_strings_latin.png index ba4403e518..4fbf4dd576 100644 Binary files a/docs/doxygen-user/images/content_viewer_strings_latin.png and b/docs/doxygen-user/images/content_viewer_strings_latin.png differ diff --git 
a/docs/doxygen-user/images/content_viewer_video.png b/docs/doxygen-user/images/content_viewer_video.png index f3a26ef8f6..09d19bdfde 100644 Binary files a/docs/doxygen-user/images/content_viewer_video.png and b/docs/doxygen-user/images/content_viewer_video.png differ diff --git a/docs/doxygen-user/images/custom_web_categories.png b/docs/doxygen-user/images/custom_web_categories.png new file mode 100644 index 0000000000..a49a1d2852 Binary files /dev/null and b/docs/doxygen-user/images/custom_web_categories.png differ diff --git a/docs/doxygen-user/images/custom_web_categories_results.png b/docs/doxygen-user/images/custom_web_categories_results.png new file mode 100644 index 0000000000..68cc44bbfd Binary files /dev/null and b/docs/doxygen-user/images/custom_web_categories_results.png differ diff --git a/docs/doxygen-user/images/data_source_host_select.png b/docs/doxygen-user/images/data_source_host_select.png new file mode 100644 index 0000000000..f9275d9387 Binary files /dev/null and b/docs/doxygen-user/images/data_source_host_select.png differ diff --git a/docs/doxygen-user/images/host_merge.png b/docs/doxygen-user/images/host_merge.png new file mode 100644 index 0000000000..b5235eb8d4 Binary files /dev/null and b/docs/doxygen-user/images/host_merge.png differ diff --git a/docs/doxygen-user/images/host_merge_result.png b/docs/doxygen-user/images/host_merge_result.png new file mode 100644 index 0000000000..bb22063c3d Binary files /dev/null and b/docs/doxygen-user/images/host_merge_result.png differ diff --git a/docs/doxygen-user/images/host_os_accounts.png b/docs/doxygen-user/images/host_os_accounts.png new file mode 100644 index 0000000000..d73505fa5c Binary files /dev/null and b/docs/doxygen-user/images/host_os_accounts.png differ diff --git a/docs/doxygen-user/images/keyword-search-configuration-dialog-general.PNG b/docs/doxygen-user/images/keyword-search-configuration-dialog-general.PNG index 360ec860b9..4dbb566faa 100644 Binary files 
a/docs/doxygen-user/images/keyword-search-configuration-dialog-general.PNG and b/docs/doxygen-user/images/keyword-search-configuration-dialog-general.PNG differ diff --git a/docs/doxygen-user/images/keyword-search-configuration-dialog-string-extraction.PNG b/docs/doxygen-user/images/keyword-search-configuration-dialog-string-extraction.PNG index 5dcc5e3d03..51df8bd5bd 100644 Binary files a/docs/doxygen-user/images/keyword-search-configuration-dialog-string-extraction.PNG and b/docs/doxygen-user/images/keyword-search-configuration-dialog-string-extraction.PNG differ diff --git a/docs/doxygen-user/images/manage_hosts.png b/docs/doxygen-user/images/manage_hosts.png new file mode 100644 index 0000000000..14f4805027 Binary files /dev/null and b/docs/doxygen-user/images/manage_hosts.png differ diff --git a/docs/doxygen-user/images/options_application.png b/docs/doxygen-user/images/options_application.png index 977c63e77a..3823eb7a2e 100644 Binary files a/docs/doxygen-user/images/options_application.png and b/docs/doxygen-user/images/options_application.png differ diff --git a/docs/doxygen-user/images/reports_select.png b/docs/doxygen-user/images/reports_select.png index fe4f1f10bd..97e440adeb 100644 Binary files a/docs/doxygen-user/images/reports_select.png and b/docs/doxygen-user/images/reports_select.png differ diff --git a/docs/doxygen-user/images/reset_windows.png b/docs/doxygen-user/images/reset_windows.png new file mode 100644 index 0000000000..0b1a7ef411 Binary files /dev/null and b/docs/doxygen-user/images/reset_windows.png differ diff --git a/docs/doxygen-user/images/solr/solr_config_monitoring.png b/docs/doxygen-user/images/solr/solr_config_monitoring.png new file mode 100644 index 0000000000..d6116cd256 Binary files /dev/null and b/docs/doxygen-user/images/solr/solr_config_monitoring.png differ diff --git a/docs/doxygen-user/images/solr/solr_config_param.png b/docs/doxygen-user/images/solr/solr_config_param.png index dbc81d0cb1..3960fcf2e8 100644 Binary files 
a/docs/doxygen-user/images/solr/solr_config_param.png and b/docs/doxygen-user/images/solr/solr_config_param.png differ diff --git a/docs/doxygen-user/images/solr/solr_disable_periodic_search.png b/docs/doxygen-user/images/solr/solr_disable_periodic_search.png new file mode 100644 index 0000000000..c6b4242c68 Binary files /dev/null and b/docs/doxygen-user/images/solr/solr_disable_periodic_search.png differ diff --git a/docs/doxygen-user/images/solr/solr_jvm.png b/docs/doxygen-user/images/solr/solr_jvm.png new file mode 100644 index 0000000000..1285110183 Binary files /dev/null and b/docs/doxygen-user/images/solr/solr_jvm.png differ diff --git a/docs/doxygen-user/images/ui_person_select.png b/docs/doxygen-user/images/ui_person_select.png new file mode 100644 index 0000000000..e82a5d0c30 Binary files /dev/null and b/docs/doxygen-user/images/ui_person_select.png differ diff --git a/docs/doxygen-user/images/ui_tree_top_ds.png b/docs/doxygen-user/images/ui_tree_top_ds.png new file mode 100644 index 0000000000..7870ae8d0c Binary files /dev/null and b/docs/doxygen-user/images/ui_tree_top_ds.png differ diff --git a/docs/doxygen-user/images/ui_tree_top_persons.png b/docs/doxygen-user/images/ui_tree_top_persons.png new file mode 100644 index 0000000000..56b3c43f56 Binary files /dev/null and b/docs/doxygen-user/images/ui_tree_top_persons.png differ diff --git a/docs/doxygen-user/images/view_options_options_panel.png b/docs/doxygen-user/images/view_options_options_panel.png index 40b5f6cf8a..168ce70cc4 100644 Binary files a/docs/doxygen-user/images/view_options_options_panel.png and b/docs/doxygen-user/images/view_options_options_panel.png differ diff --git a/docs/doxygen-user/images/views_grouped_tree.png b/docs/doxygen-user/images/views_grouped_tree.png new file mode 100644 index 0000000000..d76b991a52 Binary files /dev/null and b/docs/doxygen-user/images/views_grouped_tree.png differ diff --git a/docs/doxygen-user/images/views_standard_tree.png 
b/docs/doxygen-user/images/views_standard_tree.png new file mode 100644 index 0000000000..80eddc68a9 Binary files /dev/null and b/docs/doxygen-user/images/views_standard_tree.png differ diff --git a/docs/doxygen-user/keyword_search.dox b/docs/doxygen-user/keyword_search.dox index fd207a6de2..45ef1c0dfa 100644 --- a/docs/doxygen-user/keyword_search.dox +++ b/docs/doxygen-user/keyword_search.dox @@ -44,12 +44,28 @@ Under the Keyword list is the option to send ingest inbox messages for each hit. The string extraction setting defines how strings are extracted from files from which text cannot be extracted normally because their file formats are not supported. This is the case with arbitrary binary files (such as the page file) and chunks of unallocated space that represent deleted files. When we extract strings from binary files we need to interpret sequences of bytes as text differently, depending on the possible text encoding and script/language used. In many cases we don't know in advance what the specific encoding/language the text is encoded in. However, it helps if the investigator is looking for a specific language, because by selecting less languages the indexing performance will be improved and the number of false positives will be reduced. +\image html keyword-search-configuration-dialog-string-extraction.PNG + The default setting is to search for English strings only, encoded as either UTF8 or UTF16. This setting has the best performance (shortest ingest time). The user can also use the String Viewer first and try different script/language settings, and see which settings give satisfactory results for the type of text relevant to the investigation. Then the same setting that works for the investigation can be applied to the keyword search ingest. -\image html keyword-search-configuration-dialog-string-extraction.PNG -There is also a setting to enable Optical Character Recognition (OCR). If enabled, text may be extracted from supported image types. 
Enabling this feature will make the keyword search module take longer to run, and the results are not perfect. The following shows a sample image containing text: +## General Settings tab {#generalSettingsTab} + +\image html keyword-search-configuration-dialog-general.PNG + +### NIST NSRL Support +The hash lookup ingest service can be configured to use the NIST NSRL hash set of known files. The keyword search advanced configuration dialog "General" tab contains an option to skip keyword indexing and search on files that have previously marked as "known" and uninteresting files. Selecting this option can greatly reduce size of the index and improve ingest performance. In most cases, user does not need to keyword search for "known" files. + +### Result update frequency during ingest +To control how frequently searches are executed during ingest, the user can adjust the timing setting available in the keyword search advanced configuration dialog "General" tab. Setting the number of minutes lower will result in more frequent index updates and searches being executed and the user will be able to see results more in real-time. However, more frequent updates can affect the overall performance, especially on lower-end systems, and can potentially lengthen the overall time needed for the ingest to complete. + +One can also choose to have no periodic searches. This will speed up the ingest. Users choosing this option can run their keyword searches once the entire keyword search index is complete. + +### Optical Character Recognition +There is also a setting to enable Optical Character Recognition (OCR). If enabled, text may be extracted from supported image types. Enabling this feature will make the keyword search module take longer to run, and the results are not perfect. The secondary checkbox can make OCR run faster by only processing large images and images extracted from documents. 
+ +The following shows a sample image containing text: \image html keyword-search-ocr-image.png @@ -72,19 +88,6 @@ and move them to the right location. The following steps breakdown this process The language files will now be supported when OCR is enabled in the Keyword Search Settings. -## General Settings tab {#generalSettingsTab} - -\image html keyword-search-configuration-dialog-general.PNG - -### NIST NSRL Support -The hash lookup ingest service can be configured to use the NIST NSRL hash set of known files. The keyword search advanced configuration dialog "General" tab contains an option to skip keyword indexing and search on files that have previously marked as "known" and uninteresting files. Selecting this option can greatly reduce size of the index and improve ingest performance. In most cases, user does not need to keyword search for "known" files. - -### Result update frequency during ingest -To control how frequently searches are executed during ingest, the user can adjust the timing setting available in the keyword search advanced configuration dialog "General" tab. Setting the number of minutes lower will result in more frequent index updates and searches being executed and the user will be able to see results more in real-time. However, more frequent updates can affect the overall performance, especially on lower-end systems, and can potentially lengthen the overall time needed for the ingest to complete. - -One can also choose to have no periodic searches. This will speed up the ingest. Users choosing this option can run their keyword searches once the entire keyword search index is complete. - -
    diff --git a/docs/doxygen-user/multi-user/installSolr.dox b/docs/doxygen-user/multi-user/installSolr.dox index 8bfa2c712e..d8a4690d58 100644 --- a/docs/doxygen-user/multi-user/installSolr.dox +++ b/docs/doxygen-user/multi-user/installSolr.dox @@ -53,9 +53,8 @@ Follow these steps to configure Solr: Required Solr Configuration Parameters:
    • JAVA_HOME – path to 64-bit JRE installation. For example \c "JAVA_HOME=C:\Program Files\Java\jre1.8.0_151" or \c "JAVA_HOME=C:\Program Files\ojdkbuild\java-1.8.0-openjdk-1.8.0.222-1" -
    • DEFAULT_CONFDIR – path to Autopsy configuration directory. If the Solr archive was extracted into \c "C:\solr-8.6.3" directory, then this path will be \c "C:\ solr-8.6.3\server\solr\configsets\AutopsyConfig\conf". -
    • Dbootstrap_confdir – same path as DEFAULT_CONFDIR -
    • SOLR_JAVA_MEM - Solr JVM heap size should be somewhere between one third and one half of the total RAM available on the machine. A rule of thumb would be use \c "set SOLR_JAVA_MEM=-Xms2G -Xmx14G" for a machine with 32GB of RAM or more, and \c "set SOLR_JAVA_MEM=-Xms2G -Xmx8G" for a machine with 16GB of RAM. +
    • DEFAULT_CONFDIR – path to Autopsy configuration directory. If the Solr archive was extracted into \c "C:\solr-8.6.3" directory, then this path will be \c "C:\ solr-8.6.3\server\solr\configsets\AutopsyConfig\conf". Do not include quotes around the path. +
    • SOLR_JAVA_MEM - Solr JVM heap size should be as large as the Solr machine’s resources allow, at least half of the total RAM available on the machine. A rule of thumb would be use "set SOLR_JAVA_MEM=-Xms2G -Xmx40G" for a machine with 64GB of RAM, "set SOLR_JAVA_MEM=-Xms2G -Xmx20G" for a machine with 32GB of RAM, and "set SOLR_JAVA_MEM=-Xms2G -Xmx8G" for a machine with 16GB of RAM. Please see the \ref install_solr_heap_usage "troubleshooting section" for more info regarding Solr heap usage and troubleshooting information.
    • SOLR_DATA_HOME – location where Solr indexes will be stored. If this is not configured, the indexes will be stored in the \c "C:\solr-8.6.3\server\solr" directory. NOTE: for Autopsy cases consisting of large number of data sources, Solr indexes can get very large (hundreds of GBs, or TBs) so they should probably be stored on a larger network share.
    @@ -208,8 +207,84 @@ Solr creates two types of data that need to be backed up:
    1. In a default installation that data is stored in \c "C:\solr-8.6.3\server\solr zoo_data" (assuming that the Solr package ZIP was extracted into \c "C:\solr-8.6.3" directory).
    -\section install_solr_delayed_start Delayed Start Problems With Large Number Of Solr Collections +\section Troubleshooting + +\subsection install_solr_delayed_start Delayed Start Problems With Large Number Of Solr Collections In our testing, we have encountered an issue when a very large number (thousands) of Autopsy multi-user cases was created. Each new Autopsy multi-user case creates a Solr "collection" that contains the Solr text index. With 2,000 existing collections, when Solr service is restarted, Solr appears to internally be "loading" roughly 250 collections per minute (in chronological order, starting with oldest collections). After 4 minutes roughly half of the 2,000 collections were loaded. Users are able to search the collections that have been loaded, but they are unable to open or search the collections that have not yet been internally loaded by Solr. After 7-8 minutes all collections were loaded. These numbers will vary depending on the specific cluster configuration, text index file location (network or local storage), network throughput, number of Solr servers, etc. +\subsection install_solr_heap_usage Solr Heap Usage and Recommendations + +Solr JVM heap plays especially important role if you are going to create a large number of Autopsy cases (i.e. Solr collections). Here are some “rule of thumb” Solr heap usage stats that we identified during our internal testing: +
      +
    • For very small cases/collections, our tests show that Solr uses an absolute minimum of 7-10 MB of heap per collection. +
    • For larger cases/collections (50-100GB input E01 size) Solr uses at least 65 MB per collection +
    • For large cases/collections (1.5TB input E01 size) Solr uses at least 850 MB per collection +
    + +\subsubsection install_solr_heap_troublshooting Troubleshooting Solr Heap Issues + +Once the Solr JVM uses all of its available heap and is unable to free up any memory via garbage collection, the Solr service will not be able to create new collections or may become completely unresponsive, resulting in Autopsy being unable to create new text indexes. Below is a list of some of the errors that you might see as a result of this in the Solr (not Autopsy) service logs and/or the Solr admin console: + +
      +
    • org.apache.solr.common.SolrException: Could not register as the leader because creating the ephemeral registration node in ZooKeeper failed +
    • RequestHandlerBase org.apache.solr.common.SolrException: Failed to get config from zookeeper +
    • RecoveryStrategy Error while trying to recover. org.apache.solr.common.SolrException: Cloud state still says we are leader. +
    • RequestHandlerBase org.apache.solr.common.SolrException: Could not load collection from ZK +
    • org.apache.solr.common.SolrException: Error CREATEing SolrCore: Unable to create core. Caused by: There are no more files +
    • org.apache.solr.common.SolrException: java.io.IOException: There are no more files +
    • org.apache.solr.common.SolrException: Cannot unload non-existent core +
    • ZkIndexSchemaReader Error creating ZooKeeper watch for the managed schema +
    + +You may also see the following ZooKeeper errors: +
      +
    • org.apache.zookeeper.KeeperException$NodeExistsException: KeeperErrorCode = NodeExists +
    • org.apache.zookeeper.KeeperException$BadVersionException: KeeperErrorCode = BadVersion for (collection_name)/state.json +
    • org.apache.zookeeper.KeeperException$SessionExpiredException: KeeperErrorCode = Session expired for /roles.json +
    • org.apache.zookeeper.KeeperException$SessionExpiredException: KeeperErrorCode = Session expired for /configs/AutopsyConfig/managed-schema +
    + +The common theme among most of these errors is the breakdown in communication between Solr and ZooKeeper, especially when using an embedded ZooKeeper server. It is important to note that these errors may potentially occur for other reasons and are not unique to Solr heap issues. + +\subsubsection install_solr_monitoring Monitoring Solr Heap Usage + +The simplest way to see current Solr heap usage is to check the Solr Admin Console web page. To access the Solr admin console, on the Solr machine navigate to http://localhost:8983/solr/#/ . There you will be able to see the Solr memory usage: + +\image html solr_config_monitoring.png + +However, the dashboard does not show enough detail to know when Solr is out of heap, so it should only be used to identify that you are NOT having heap issues. Even if the dashboard shows that the Solr heap is fully used, it may or may not be an issue. It is best to use profiling tools like Java VisualVM. In order for VisualVM to connect to Solr, you will need to enable the JMX interface for Solr’s Java process. The details are described here: +
    • https://solr.apache.org/guide/8_3/using-jmx-with-solr.html#using-jmx-with-solr
    + +Solr heap and other performance tuning is described in the following article: +
    • https://cwiki.apache.org/confluence/display/SOLR/SolrPerformanceProblems
    + +\subsubsection install_solr_performance_tuning Notes on Solr Performance Tuning + +If you are going to work with large images (TBs) and KWS performance is important, the best approach is to use a network (Multi-User) Solr server. + +Some notes: +
      +
    • A single Solr server works well for data sources up to 1TB; after that the performance starts to slow down. The performance doesn't "drop off the cliff," but it keeps slowing down as you add more data to the index. After 3TBs of input data the Solr performance takes a significant decline. + +
    • A single Multi-User Solr server may not perform much better than a Single-User Autopsy case. However, in Multi-User mode you can add additional Solr servers and create a Solr cluster. See the \ref install_sorl_adding_nodes section in the above documentation. These additional nodes are where the performance gains come from, especially for large input data sources. Apache Solr documentation calls this "SolrCloud" mode and each Solr server is called a "shard". The more Solr servers/shards you have, the better performance you will have for large data sets. On our test and production clusters, we are using 4-6 Solr servers to handle data sets of up to 10TB, which seems to be the upper limit. After that, you are better off breaking your Autopsy case into multiple cases, thus creating a separate Solr index for each case. + +
    • In our testing, a 3-node SolrCloud indexes data roughly twice as fast as single Solr node. A 6-node SolrCloud indexes data almost twice as fast as 3-node SolrCloud. After that we did not see much performance gain. These performance figures are heavily dependent on network throughput, machine resources, disk access speeds, and the type of data that is being indexed. + +
    • Exact match searches are much faster than substring or regex searches. + +
    • Regex searches tend to use a lot of RAM on the Solr server. + +
    • Indexing/searching of unallocated space really slows everything down because it is mostly binary or garbled data. + +
    • If you are not going to look at the search results until ingest is over then you should disable the periodic keyword searches. They will start taking longer as your input data grows. This can be done in Tools->Options->Keyword Search tab: + +\image html solr_disable_periodic_search.png + +
    • In Single-User mode, if you are ingesting and indexing data sources that are multiple TBs in size, then both Autopsy memory and especially the Solr JVM memory needs to be increased from their default settings. This can be done in Tools->Options->Application tab. We would recommend at least 10GB heap size for Autopsy and at least 6-8GB heap size for Solr. Note that these are "maximum" values that the process will be allowed to use/request. The operating system will not allocate more heap than the process actually needs. + +\image html solr_jvm.png + +
    + */ diff --git a/docs/doxygen-user/recent_activity.dox b/docs/doxygen-user/recent_activity.dox index ba2f94deef..094d43b077 100644 --- a/docs/doxygen-user/recent_activity.dox +++ b/docs/doxygen-user/recent_activity.dox @@ -13,7 +13,17 @@ This allows you to see what activity has occured in the last seven days of usage Configuration ======= -There is nothing to configure for this module. +Configuring Custom Web Categories +------ + +The Recent Activity module will create "Web Categories" results for domains that match a list of categories. There are some built-in categories, but custom categories can also be entered through the "Custom Web Categories" tab on the main options panel. These custom categories will override any matching built-in category. + +\image html custom_web_categories.png + +The buttons below the list of categories allow you to enter new categories, edit existing categories, and delete categories. You can also export your list of categories and import a set of categories that was previously exported from this panel. Importing a set will add its categories to the current list (existing categories will not be deleted). + +The category match for each domain will be listed in the "Name" column in the result viewer. +\image html custom_web_categories_results.png Using the Module @@ -23,6 +33,7 @@ Ingest Settings ------ There are no run-time settings for this module. + Seeing Results ------ Results show up in the tree under "Extracted Content". diff --git a/docs/doxygen-user/reporting.dox b/docs/doxygen-user/reporting.dox index 86a5e9df61..dbdf63a4c8 100644 --- a/docs/doxygen-user/reporting.dox +++ b/docs/doxygen-user/reporting.dox @@ -58,7 +58,7 @@ Generating an Excel report is very similar to an \ref report_html. You select wh \image html reports_excel.png -\subsection report_tagged_hashes Add Tagged Hashes +\subsection report_tagged_hashes Save Tagged Hashes This is one of the report modules that doesn't generate an actual report. 
The purpose of this module is to easily add the hashes of some/all tagged files to an Autopsy hash set that can be used by the \ref hash_db_page. You can use the "Configure Hash Sets" button to create a new @@ -69,6 +69,10 @@ hash set to write to, or use an existing hash set. After running this module, if you use the same hash set on future cases then everything that was tagged with one of the selected tags in this case will show up as Hashset Hits. +\subsection reports_unique_words Extract Unique Words + +This report module allows you to export all unique "words" found in a case. These words come from the Solr index that was created by the \ref keyword_search_page. + \subsection report_case_uco CASE-UCO This module creates a JSON output file in CASE-UCO format for a single data source. diff --git a/docs/doxygen-user/tree_viewer.dox b/docs/doxygen-user/tree_viewer.dox index 9ca6ce49ce..d3d6651fce 100644 --- a/docs/doxygen-user/tree_viewer.dox +++ b/docs/doxygen-user/tree_viewer.dox @@ -4,20 +4,37 @@ The tree on the left-hand side of the main window is where you can browse the files in the data sources in the case and find saved results from automated analyis (ingest). The tree has five main areas: -- Data Sources: This shows the directory tree hierarchy of the data sources. You can navigate to a specific file or directory here. Each data source added to the case is represented as a distinct sub tree. If you add a data source multiple times, it shows up multiple times. +- Persons / Hosts / Data Sources: This shows the directory tree hierarchy of the data sources. You can navigate to a specific file or directory here. Each data source added to the case is represented as a distinct sub tree. If you add a data source multiple times, it shows up multiple times. - Views: Specific types of files from the data sources are shown here, aggregated by type or other properties. Files here can come from more than one data source. 
- Results: This is where you can see the results from both the automated analysis (ingest) running in the background and your search results. - Tags: This is where files and results that have been \ref tagging_page "tagged" are shown. - Reports: Reports that you have generated, or that ingest modules have created, show up here. -You can also use the "Group by data source" option available through the \ref view_options_page to move the Views, Results, and Tags tree nodes under their corresponding data sources. This can be helpful on very large cases to reduce the size of each sub tree. For example: +You can also use the "Group by Person/Host" option available through the \ref view_options_page to move the Views, Results, and Tags tree nodes under their corresponding person and host. This can be helpful on very large cases to reduce the size of each sub tree. -\image html ui_layout_group_tree.PNG +\section ui_tree_ds Persons / Hosts / Data Sources +By default, the top node of the tree viewer will contain all data sources in the case. The Data Sources node is organized by host and then the data source itself. Right clicking on the various nodes in the Data Sources area of the tree will allow you to get more options for each data source and its contents. -\section ui_tree_ds Data Sources +\image html ui_tree_top_ds.png -The Data Sources area shows each data source that has been added to the case, in order added (top one is first). -Right clicking on the various nodes in the Data Sources area of the tree will allow you to get more options for each data source and its contents. +If the "Group by Person/Host" option has been selected in the \ref view_options_group "View Options", the hosts and data sources will be organized under any persons that have been associated with the hosts. Additionally, the rest of the nodes (Views, Results, etc) will be found under each data source. 
+ +\image html ui_tree_top_persons.png + +\subsection ui_tree_persons Persons + +If the "Group by Person/Host" option in the \ref view_options_group "View Options" has been set, the top level nodes will display persons. Persons are manually created and can be associated with one or more hosts. To add or remove a person from a host, right-click on the host and select the appropriate option. + +\image html ui_person_select.png + +You can edit and delete persons by right-clicking on the node. + +\subsection ui_tree_hosts Hosts + +All data sources are organized under host nodes. See the \ref host_page "hosts page" for more information on using hosts. + +\subsection ui_tree_ds_node Data Sources +Under the hosts are the nodes for each data source. Unallocated space is the chunks of a file system that are currently not being used for anything. Unallocated space can hold deleted files and other interesting artifacts. In an image data source, unallocated space is stored in blocks with distinct locations in the file system. However, because of the way carving tools work, it is better to feed these tools a single, large unallocated space file. Autopsy provides access to both methods of looking at unallocated space. \li Individual blocks in a volume For each volume, there is a "virtual" folder named "$Unalloc". This folder contains all the individual unallocated blocks in contiguous runs (unallocated space files) as the image is storing them. You can right click and extract any unallocated space file the same way you can extract any other type of file in the Data Sources area. diff --git a/docs/doxygen-user/troubleshooting.dox b/docs/doxygen-user/troubleshooting.dox index 666b86f10b..c08ebf50c4 100644 --- a/docs/doxygen-user/troubleshooting.dox +++ b/docs/doxygen-user/troubleshooting.dox @@ -9,18 +9,19 @@ If you are experiencing an error, we encourage you to post on the forum (https:/
  3. What led to the error. For example:
    • What type of data source was being processed? -
    • Which ingest modules were running? +
    • Which ingest modules were running? You can generate an \ref ingest_monitoring "ingest snapshot" to view the current ingest state.
    • Which specialized viewer were you using?
  4. The error being displayed on screen (if applicable) +
  5. A \ref troubleshooting_stack "thread dump" or screenshot of the \ref ingest_monitoring "ingest snapshot" if Autopsy seems stuck
  6. If there were any errors in the \ref troubleshooting_logs "logs" \section troubleshooting_specific_issues Specific Issues -\subsection troubleshooting_fond_size Font Size Too Small in Windows +\subsection troubleshooting_fond_size Font Size Too Small -Make the following changes if the application is hard to navigate in High DPI systems: +In Windows, you can make the following changes if the application is hard to navigate in High DPI systems:
    1. Right-click on the application icon on your Desktop, Start Menu, etc. @@ -32,8 +33,18 @@ Make the following changes if the application is hard to navigate in High DPI sy
    2. Restart Autopsy.
    +In Linux, you can supply the font size with "--fontsize XX" command line argument, but not all of the dialogs are correctly responsive and some of the text will get cut off. + \section troubleshooting_general General Troubleshooting +\subsection troubleshooting_reset_ui Resetting the UI + +If the Autopsy window no longer looks like the default \ref uilayout_page (for example, if a viewer has disappeared or there is a strange empty space), you can reset it. To do this, go to Window->Reset Windows. This will cause Autopsy to restart. If you have a case open, it will reopen after the reset. + +\image html reset_windows.png + +If resetting the windows does not fix the problem, you may need to delete your user folder as described in the next section. + \subsection troubleshooting_user_folder Deleting the Autopsy User Folder If Autopsy starts behaving strangely, stops loading entirely, or menu items go missing, you probably need to delete your user folder. Doing so essenitally gives you a fresh installation. On Windows the user folder is located in "C:\Users\(user name)\AppData\Roaming\autopsy". diff --git a/docs/doxygen-user/view_options.dox b/docs/doxygen-user/view_options.dox index 1ff398da81..270d919507 100644 --- a/docs/doxygen-user/view_options.dox +++ b/docs/doxygen-user/view_options.dox @@ -66,11 +66,15 @@ If you have a \ref machine_translation_page module installed, this option will a The settings in this section only apply to the current case. -\subsection view_options_group Group by data source +\subsection view_options_group Data Source Grouping -The "Group by data source" option allows you to separate all elements in the \ref ui_tree by data source. This can help nodes load faster on large cases. +The options here allow you to choose how to display data in the \ref ui_tree. The top option ("Group by Data Type") displays combined results for all data sources. All nodes on the tree will contain combined results for all data sources in the case. 
-\image html ui_layout_group_tree.PNG +\image html views_standard_tree.png + +The second option ("Group by Person/Host") separates the results for each data source, and organizes the data sources by \ref ui_tree_persons "person" and \ref ui_tree_hosts "host". + +\image html views_grouped_tree.png \section view_options_session Current Session Settings diff --git a/nbproject/platform.properties b/nbproject/platform.properties index 7a61a6e1b6..32e681b3af 100644 --- a/nbproject/platform.properties +++ b/nbproject/platform.properties @@ -7,9 +7,9 @@ suite.dir=${basedir} nbplatform.active=download nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version} harness.dir=${nbplatform.active.dir}/harness -bootstrap.url=https://netbeans-vm.apache.org/uc/${netbeans-plat-version}/tasks.jar +bootstrap.url=https://netbeans-vm1.apache.org/uc/${netbeans-plat-version}/tasks.jar # Where we get the platform from. To see what versions are available, open URL in browser up to the .../updates part of the URL -autoupdate.catalog.url=https://netbeans-vm.apache.org/uc/${netbeans-plat-version}/updates.xml.gz +autoupdate.catalog.url=https://netbeans-vm1.apache.org/uc/${netbeans-plat-version}/updates.xml.gz cluster.path=\ ${nbplatform.active.dir}/harness:\ ${nbplatform.active.dir}/java:\ diff --git a/release_scripts/localization_scripts/languagedictutil.py b/release_scripts/localization_scripts/languagedictutil.py index 25696f6f8c..a8ced46184 100644 --- a/release_scripts/localization_scripts/languagedictutil.py +++ b/release_scripts/localization_scripts/languagedictutil.py @@ -1,10 +1,11 @@ import os from pathlib import Path -from typing import Dict, Iterator, Tuple, TypeVar +from typing import Dict, Iterator, Tuple, TypeVar, List from git import Blob from foundvalue import FoundValue from gitutil import get_text +from propentry import PropEntry from propsutil import get_entry_dict @@ -34,7 +35,7 @@ def extract_translations(orig_file_iter: Iterator[Tuple[str, Blob]], translated_ 
# determine original and translated files with common parent folders and find common keys to_ret: Dict[str, FoundValue] = dict() - for (common_folder, (original_path, original_blob), (translated_path, translated_blob))\ + for (common_folder, (original_path, original_blob), (translated_path, translated_blob)) \ in common_entries(original_files, translated_files): orig_dict = sanitize_prop_dict_keys(get_entry_dict(get_text(original_blob))) @@ -106,3 +107,42 @@ def common_entries(*dcts: Dict[K, V]) -> Iterator[Tuple[K, Tuple[V, ...]]]: return for i in set(dcts[0]).intersection(*dcts[1:]): yield (i,) + tuple(d[i] for d in dcts) + + +def find_unmatched_translations(orig_file_iter: Iterator[Tuple[str, Blob]], + translated_file_iter: Iterator[Tuple[str, Blob]], + orig_filename: str, translated_filename: str) -> List[PropEntry]: + """ + Finds all unmatched translation (where English is non-empty value and Japanese does not exist or is empty). + + Args: + orig_file_iter: An iterator of tuples containing the path and the content of the file for original content. + translated_file_iter: An iterator of tuples containing the path and the content of the file for translated + content. + orig_filename: The original file name (i.e. 'bundle.properties-MERGED'). + translated_filename: The translated file name (i.e. 'Bundle_ja.properties'). + + Returns: A list of found unmatched translations sorted by path and then key. 
+ + """ + + # Create a dictionary mapping parent path to the file content for both original and translated files + original_files: Dict[str, Tuple[str, Blob]] = _find_file_entries(orig_file_iter, orig_filename) + translated_files: Dict[str, Tuple[str, Blob]] = _find_file_entries(translated_file_iter, translated_filename) + + to_ret: List[PropEntry] = [] + for (common_folder, (original_path, original_blob), (translated_path, translated_blob)) \ + in common_entries(original_files, translated_files): + + orig_dict = get_entry_dict(get_text(original_blob)) + translated_dict = get_entry_dict(get_text(translated_blob)) + + for key, orig_val in orig_dict.items(): + if len(orig_val.strip()) > 0 and (key not in translated_dict or len(translated_dict[key].strip()) < 1): + to_ret.append(PropEntry( + rel_path=common_folder, + key=key, + value=orig_val)) + + to_ret.sort(key=lambda rec: (rec.rel_path, rec.key)) + return to_ret diff --git a/release_scripts/localization_scripts/unmatchedscript.py b/release_scripts/localization_scripts/unmatchedscript.py new file mode 100644 index 0000000000..8f4b05302d --- /dev/null +++ b/release_scripts/localization_scripts/unmatchedscript.py @@ -0,0 +1,92 @@ +"""This script finds all '.properties-MERGED' files with no relevant translation for a given language. + +This script requires the python libraries: gitpython, jproperties, pyexcel-xlsx, xlsxwriter and pyexcel along with +python >= 3.9.1 or the requirements.txt file found in this directory can be used +(https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/#using-requirements-files). As a +consequence of gitpython, this project also requires git >= 1.7.0. 
+""" +import sys +from typing import List + +from envutil import get_proj_dir +from excelutil import write_results_to_xlsx +from gitutil import get_property_file_entries, get_commit_id, get_git_root, list_paths, get_tree +from csvutil import write_results_to_csv +import argparse + +from languagedictutil import find_unmatched_translations +from outputtype import OutputType +from propentry import convert_to_output, PropEntry +from propsutil import DEFAULT_PROPS_FILENAME, get_lang_bundle_name + + +def get_unmatched(repo_path: str, language: str, original_commit: str, translated_commit: str) -> List[PropEntry]: + """ + Get all original key values that have not been translated. + :param repo_path: Path to repo. + :param language: The language identifier (i.e. 'ja') + :param original_commit: The commit to use for original key values. + :param translated_commit: The commit to use for translated key values. + :return: The list of unmatched items + """ + original_files = filter(lambda x: x[0].endswith(DEFAULT_PROPS_FILENAME), + list_paths(get_tree(repo_path, original_commit))) + + translated_name = get_lang_bundle_name(language) + translated_files = filter(lambda x: x[0].endswith(translated_name), + list_paths(get_tree(repo_path, translated_commit))) + + return find_unmatched_translations(orig_file_iter=original_files, translated_file_iter=translated_files, + orig_filename=DEFAULT_PROPS_FILENAME, translated_filename=translated_name) + + +def main(): + # noinspection PyTypeChecker + parser = argparse.ArgumentParser(description='Gathers all key-value pairs within .properties-MERGED files that ' + 'have not been translated.', + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument(dest='output_path', type=str, help='The path to the output file. The output path should be' + ' specified as a relative path with the dot slash notation ' + '(i.e. 
\'./outputpath.xlsx\') or an absolute path.') + + parser.add_argument('-r', '--repo', dest='repo_path', type=str, required=False, + help='The path to the repo. If not specified, path of script is used.') + parser.add_argument('-o', '--output-type', dest='output_type', type=OutputType, choices=list(OutputType), + required=False, help="The output type. Currently supports 'csv' or 'xlsx'.", default='xlsx') + parser.add_argument('-nc', '--no-commit', dest='no_commit', action='store_true', default=False, + required=False, help="Suppresses adding commits to the generated header.") + parser.add_argument('-nt', '--no-translated-col', dest='no_translated_col', action='store_true', default=False, + required=False, help="Don't include a column for translation.") + parser.add_argument('-l', '--language', dest='language', type=str, required=False, default=None, + help="The language identifier (i.e. ja). If specified, this only returns items where the key" + " is not translated (i.e. no matching Japanese key or value is empty)") + + parser.add_argument('-oc', '--original-commit', dest='original_commit', type=str, required=False, default=None, + help="The commit to gather original keys.") + parser.add_argument('-tc', '--translated-commit', dest='translated_commit', type=str, required=False, default=None, + help="The commit to gather translations.") + + args = parser.parse_args() + repo_path = args.repo_path if args.repo_path is not None else get_git_root(get_proj_dir()) + output_path = args.output_path + output_type = args.output_type + translated_col = not args.no_translated_col + original_commit = args.original_commit + translated_commit = args.translated_commit + + prop_entries = get_unmatched(repo_path, args.language, original_commit, translated_commit) \ + if args.language else get_property_file_entries(repo_path) + + processing_result = convert_to_output(prop_entries, original_commit, translated_col) + + # based on 
https://stackoverflow.com/questions/60208/replacements-for-switch-statement-in-python + { + OutputType.csv: write_results_to_csv, + OutputType.xlsx: write_results_to_xlsx + }[output_type](processing_result, output_path) + + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/test/script/tskdbdiff.py b/test/script/tskdbdiff.py index 85087892b1..ac0f4cb044 100644 --- a/test/script/tskdbdiff.py +++ b/test/script/tskdbdiff.py @@ -8,6 +8,8 @@ import os import codecs import datetime import sys +from typing import Callable, Dict, Union, List + import psycopg2 import psycopg2.extras import socket @@ -167,12 +169,29 @@ class TskDbDiff(object): # create file path for gold files inside output folder. In case of diff, both gold and current run files # are available in the report output folder. Prefix Gold- is added to the filename. - gold_file_in_output_dir = output_file[:output_file.rfind("/")] + "/Gold-" + output_file[output_file.rfind("/")+1:] + gold_file_in_output_dir = os.path.join(os.path.dirname(output_file), "Gold-" + os.path.basename(output_file)) shutil.copy(gold_file, gold_file_in_output_dir) return False + @staticmethod + def _get_associated_artifact_type(cur, artifact_id, isMultiUser): + if isMultiUser: + cur.execute( + "SELECT tsk_files.parent_path, blackboard_artifact_types.display_name FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id WHERE artifact_id=%s", + [artifact_id]) + else: + cur.execute( + "SELECT tsk_files.parent_path, blackboard_artifact_types.display_name FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id WHERE artifact_id=?", + [artifact_id]) + + info = cur.fetchone() + + return "File path: " + info[0] 
+ " Artifact Type: " + info[1] + + + @staticmethod def _dump_output_db_bb(db_file, bb_dump_file, isMultiUser, pgSettings, id_obj_path_table): """Dumps sorted text results to the given output location. @@ -268,7 +287,7 @@ class TskDbDiff(object): elif attr["value_type"] == 5: attr_value_as_string = str(attr["value_int64"]) if attr["display_name"] == "Associated Artifact": - attr_value_as_string = getAssociatedArtifactType(attribute_cursor, attr_value_as_string, isMultiUser) + attr_value_as_string = TskDbDiff._get_associated_artifact_type(attribute_cursor, attr_value_as_string, isMultiUser) patrn = re.compile("[\n\0\a\b\r\f]") attr_value_as_string = re.sub(patrn, ' ', attr_value_as_string) if attr["source"] == "Keyword Search" and attr["display_name"] == "Keyword Preview": @@ -308,7 +327,7 @@ class TskDbDiff(object): srtcmdlst = ["sort", unsorted_dump, "-o", bb_dump_file] subprocess.call(srtcmdlst) - + @staticmethod def _dump_output_db_nonbb(db_file, dump_file, isMultiUser, pgSettings): """Dumps a database to a text file. 
@@ -320,66 +339,33 @@ class TskDbDiff(object): """ conn, backup_db_file = db_connect(db_file, isMultiUser, pgSettings) - id_files_table = build_id_files_table(conn.cursor(), isMultiUser) - id_vs_parts_table = build_id_vs_parts_table(conn.cursor(), isMultiUser) - id_vs_info_table = build_id_vs_info_table(conn.cursor(), isMultiUser) - id_fs_info_table = build_id_fs_info_table(conn.cursor(), isMultiUser) - id_objects_table = build_id_objects_table(conn.cursor(), isMultiUser) - id_artifact_types_table = build_id_artifact_types_table(conn.cursor(), isMultiUser) - id_legacy_artifact_types = build_id_legacy_artifact_types_table(conn.cursor(), isMultiUser) - id_reports_table = build_id_reports_table(conn.cursor(), isMultiUser) - id_images_table = build_id_image_names_table(conn.cursor(), isMultiUser) - id_accounts_table = build_id_accounts_table(conn.cursor(), isMultiUser) - id_obj_path_table = build_id_obj_path_table(id_files_table, id_objects_table, id_artifact_types_table, id_reports_table, id_images_table, id_accounts_table) + guid_utils = TskGuidUtils.create(conn) - if isMultiUser: # Use PostgreSQL - os.environ['PGPASSWORD']=pgSettings.password - pgDump = ["pg_dump", "--inserts", "-U", pgSettings.username, "-h", pgSettings.pgHost, "-p", pgSettings.pgPort, "-d", db_file, "-E", "utf-8", "-T", "blackboard_artifacts", "-T", "blackboard_attributes", "-f", "postgreSQLDump.sql"] - subprocess.call(pgDump) - postgreSQL_db = codecs.open("postgreSQLDump.sql", "r", "utf-8") - # Write to the database dump - with codecs.open(dump_file, "wb", "utf_8") as db_log: - dump_line = '' - for line in postgreSQL_db: - line = line.strip('\r\n ') - # Deal with pg_dump result file - if (line.startswith('--') or line.lower().startswith('alter') or "pg_catalog" in line or "idle_in_transaction_session_timeout" in line or not line): # It's comment or alter statement or catalog entry or set idle entry or empty line - continue - elif not line.endswith(';'): # Statement not finished - dump_line += 
line - continue - else: - dump_line += line - if 'INSERT INTO image_gallery_groups_seen' in dump_line: - dump_line = '' - continue; - dump_line = normalize_db_entry(dump_line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table, id_legacy_artifact_types, id_accounts_table) - db_log.write('%s\n' % dump_line) - dump_line = '' - postgreSQL_db.close() - else: # use Sqlite - # Delete the blackboard tables - conn.text_factory = lambda x: x.decode("utf-8", "ignore") - conn.execute("DROP TABLE blackboard_artifacts") - conn.execute("DROP TABLE blackboard_attributes") - # Write to the database dump - with codecs.open(dump_file, "wb", "utf_8") as db_log: - for line in conn.iterdump(): - if 'INSERT INTO "image_gallery_groups_seen"' in line: - continue - line = normalize_db_entry(line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table, id_legacy_artifact_types, id_accounts_table) - db_log.write('%s\n' % line) - # Now sort the file + if isMultiUser: + table_cols = get_pg_table_columns(conn) + schema = get_pg_schema(db_file, pgSettings.username, pgSettings.password, + pgSettings.pgHost, pgSettings.pgPort) + else: + table_cols = get_sqlite_table_columns(conn) + schema = get_sqlite_schema(conn) + + with codecs.open(dump_file, "wb", "utf_8") as output_file: + output_file.write(schema + "\n") + for table, cols in sorted(table_cols.items(), key=lambda pr: pr[0]): + normalizer = TABLE_NORMALIZATIONS[table] if table in TABLE_NORMALIZATIONS else None + write_normalized(guid_utils, output_file, conn, table, cols, normalizer) + + # Now sort the file srtcmdlst = ["sort", dump_file, "-o", dump_file] subprocess.call(srtcmdlst) conn.close() # cleanup the backup - if backup_db_file: - os.remove(backup_db_file) - return id_obj_path_table - + # if backup_db_file: + # os.remove(backup_db_file) + return guid_utils.obj_id_guids + @staticmethod def 
dump_output_db(db_file, dump_file, bb_dump_file, isMultiUser, pgSettings): """Dumps the given database to text files for later comparison. @@ -391,7 +377,7 @@ class TskDbDiff(object): id_obj_path_table = TskDbDiff._dump_output_db_nonbb(db_file, dump_file, isMultiUser, pgSettings) TskDbDiff._dump_output_db_bb(db_file, bb_dump_file, isMultiUser, pgSettings, id_obj_path_table) - + @staticmethod def _get_tmp_file(base, ext): time = datetime.datetime.now().time().strftime("%H%M%f") return os.path.join(os.environ['TMP'], base + time + ext) @@ -407,452 +393,737 @@ class PGSettings(object): self.username = user self.password = password - def get_pgHost(): + def get_pgHost(self): return self.pgHost - def get_pgPort(): + def get_pgPort(self): return self.pgPort - def get_username(): + def get_username(self): return self.username - def get_password(): + def get_password(self): return self.password -def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info_table, objects_table, reports_table, images_table, artifact_table, accounts_table): - """ Make testing more consistent and reasonable by doctoring certain db entries. - - Args: - line: a String, the line to remove the object id from. - files_table: a map from object ids to file paths. +class TskGuidUtils: + """ + This class provides guids for potentially volatile data. """ - # Sqlite statement use double quotes for table name, PostgreSQL doesn't. We check both databases results for normalization. 
- files_index = line.find('INSERT INTO "tsk_files"') > -1 or line.find('INSERT INTO tsk_files ') > -1 - path_index = line.find('INSERT INTO "tsk_files_path"') > -1 or line.find('INSERT INTO tsk_files_path ') > -1 - object_index = line.find('INSERT INTO "tsk_objects"') > -1 or line.find('INSERT INTO tsk_objects ') > -1 - vs_parts_index = line.find('INSERT INTO "tsk_vs_parts"') > -1 or line.find('INSERT INTO tsk_vs_parts ') > -1 - report_index = line.find('INSERT INTO "reports"') > -1 or line.find('INSERT INTO reports ') > -1 - layout_index = line.find('INSERT INTO "tsk_file_layout"') > -1 or line.find('INSERT INTO tsk_file_layout ') > -1 - data_source_info_index = line.find('INSERT INTO "data_source_info"') > -1 or line.find('INSERT INTO data_source_info ') > -1 - event_description_index = line.find('INSERT INTO "tsk_event_descriptions"') > -1 or line.find('INSERT INTO tsk_event_descriptions ') > -1 - events_index = line.find('INSERT INTO "tsk_events"') > -1 or line.find('INSERT INTO tsk_events ') > -1 - ingest_job_index = line.find('INSERT INTO "ingest_jobs"') > -1 or line.find('INSERT INTO ingest_jobs ') > -1 - examiners_index = line.find('INSERT INTO "tsk_examiners"') > -1 or line.find('INSERT INTO tsk_examiners ') > -1 - ig_groups_index = line.find('INSERT INTO "image_gallery_groups"') > -1 or line.find('INSERT INTO image_gallery_groups ') > -1 - ig_groups_seen_index = line.find('INSERT INTO "image_gallery_groups_seen"') > -1 or line.find('INSERT INTO image_gallery_groups_seen ') > -1 - os_account_index = line.find('INSERT INTO "tsk_os_accounts"') > -1 or line.find('INSERT INTO tsk_os_accounts') > -1 - os_account_attr_index = line.find('INSERT INTO "tsk_os_account_attributes"') > -1 or line.find('INSERT INTO tsk_os_account_attributes') > -1 - - parens = line[line.find('(') + 1 : line.rfind(')')] - no_space_parens = parens.replace(" ", "") - fields_list = list(csv.reader([no_space_parens], quotechar="'"))[0] - #Add back in the quotechar for values that were 
originally wrapped (csv reader consumes this character) - fields_list_with_quotes = [] - ptr = 0 - for field in fields_list: - if(len(field) == 0): - field = "'" + field + "'" - else: - start = no_space_parens.find(field, ptr) - if((start - 1) >= 0 and no_space_parens[start - 1] == '\''): - if((start + len(field)) < len(no_space_parens) and no_space_parens[start + len(field)] == '\''): - field = "'" + field + "'" - fields_list_with_quotes.append(field) - if(ptr > 0): - #Add one for each comma that is used to separate values in the original string - ptr+=1 - ptr += len(field) + @staticmethod + def _get_guid_dict(db_conn, select_statement, delim=""): + """ + Retrieves a dictionary mapping the first item selected to a concatenation of the remaining values. + Args: + db_conn: The database connection. + select_statement: The select statement. + delim: The delimiter for how row data from index 1 to end shall be concatenated. - fields_list = fields_list_with_quotes + Returns: A dictionary mapping the key (the first item in the select statement) to a concatenation of the remaining values. - # remove object ID - if files_index: - - # Ignore TIFF size and hash if extracted from PDFs. - # See JIRA-6951 for more details. - # index -3 = 3rd from the end, which is extension - # index -5 = 5th from the end, which is the parent path. 
- if fields_list[-3] == "'tif'" and fields_list[-5].endswith(".pdf/'"): - fields_list[15] = "'SIZE_IGNORED'" - fields_list[23] = "'MD5_IGNORED'" - fields_list[24] = "'SHA256_IGNORED'" - newLine = ('INSERT INTO "tsk_files" VALUES(' + ', '.join(fields_list[1:-1]) + ');') #leave off first (object id) and last (os_account_id) field - # Remove object ID from Unalloc file name - newLine = re.sub('Unalloc_[0-9]+_', 'Unalloc_', newLine) - return newLine - # remove object ID - elif vs_parts_index: - newLine = ('INSERT INTO "tsk_vs_parts" VALUES(' + ', '.join(fields_list[1:]) + ');') - return newLine - # remove group ID - elif ig_groups_index: - newLine = ('INSERT INTO "image_gallery_groups" VALUES(' + ', '.join(fields_list[1:]) + ');') - return newLine - #remove id field - elif ig_groups_seen_index: - # Only removing the id and group_id fields for now. May need to care about examiner_id and seen fields in future. - newLine = ('INSERT INTO "image_gallery_groups_seen" VALUES(' + ', '.join(fields_list[2:]) + ');') - return newLine - # remove object ID - elif path_index: - obj_id = int(fields_list[0]) - objValue = files_table[obj_id] - # remove the obj_id from ModuleOutput/EmbeddedFileExtractor directory - idx_pre = fields_list[1].find('EmbeddedFileExtractor') + len('EmbeddedFileExtractor') - if idx_pre > -1: - idx_pos = fields_list[1].find('\\', idx_pre + 2) - dir_to_replace = fields_list[1][idx_pre + 1 : idx_pos] # +1 to skip the file seperator - dir_to_replace = dir_to_replace[0:dir_to_replace.rfind('_')] - pathValue = fields_list[1][:idx_pre+1] + dir_to_replace + fields_list[1][idx_pos:] - else: - pathValue = fields_list[1] - # remove localhost from postgres par_obj_name - multiOutput_idx = pathValue.find('ModuleOutput') - if multiOutput_idx > -1: - pathValue = "'" + pathValue[pathValue.find('ModuleOutput'):] #postgres par_obj_name include losthost + """ + cursor = db_conn.cursor() + cursor.execute(select_statement) + ret_dict = {} + for row in cursor: + # concatenate value 
rows with delimiter filtering out any null values. + ret_dict[row[0]] = delim.join([str(col) for col in filter(lambda col: col is not None, row[1:])]) - newLine = ('INSERT INTO "tsk_files_path" VALUES(' + objValue + ', ' + pathValue + ', ' + ', '.join(fields_list[2:]) + ');') - return newLine - # remove object ID - elif layout_index: - obj_id = fields_list[0] - path= files_table[int(obj_id)] - newLine = ('INSERT INTO "tsk_file_layout" VALUES(' + path + ', ' + ', '.join(fields_list[1:]) + ');') - # Remove object ID from Unalloc file name - newLine = re.sub('Unalloc_[0-9]+_', 'Unalloc_', newLine) - return newLine - # remove object ID - elif object_index: - obj_id = fields_list[0] - parent_id = fields_list[1] - newLine = 'INSERT INTO "tsk_objects" VALUES(' - path = None - parent_path = None + return ret_dict - #if obj_id or parent_id is invalid literal, we simple return the values as it is - try: - obj_id = int(obj_id) - if parent_id != 'NULL': - parent_id = int(parent_id) - except Exception as e: - print(obj_id, parent_id) - return line + @staticmethod + def create(db_conn): + """ + Creates an instance of this class by querying for relevant guid data. + Args: + db_conn: The database connection. - if obj_id in files_table.keys(): - path = files_table[obj_id] - elif obj_id in vs_parts_table.keys(): - path = vs_parts_table[obj_id] - elif obj_id in vs_info_table.keys(): - path = vs_info_table[obj_id] - elif obj_id in fs_info_table.keys(): - path = fs_info_table[obj_id] - elif obj_id in reports_table.keys(): - path = reports_table[obj_id] + Returns: The instance of this class. 
+ + """ + guid_files = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, parent_path, name FROM tsk_files") + guid_vs_parts = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, addr, start FROM tsk_vs_parts", "_") + guid_vs_info = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, vs_type, img_offset FROM tsk_vs_info", "_") + guid_fs_info = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, img_offset, fs_type FROM tsk_fs_info", "_") + guid_image_names = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, name FROM tsk_image_names " + "WHERE sequence=0") + guid_os_accounts = TskGuidUtils._get_guid_dict(db_conn, "SELECT os_account_obj_id, addr FROM tsk_os_accounts") + guid_reports = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, path FROM reports") + + objid_artifacts = TskGuidUtils._get_guid_dict(db_conn, + "SELECT blackboard_artifacts.artifact_obj_id, " + "blackboard_artifact_types.type_name " + "FROM blackboard_artifacts " + "INNER JOIN blackboard_artifact_types " + "ON blackboard_artifact_types.artifact_type_id = " + "blackboard_artifacts.artifact_type_id") + + artifact_objid_artifacts = TskGuidUtils._get_guid_dict(db_conn, + "SELECT blackboard_artifacts.artifact_id, " + "blackboard_artifact_types.type_name " + "FROM blackboard_artifacts " + "INNER JOIN blackboard_artifact_types " + "ON blackboard_artifact_types.artifact_type_id = " + "blackboard_artifacts.artifact_type_id") + + cursor = db_conn.cursor() + cursor.execute("SELECT obj_id, par_obj_id FROM tsk_objects") + par_obj_objects = dict([(row[0], row[1]) for row in cursor]) + + guid_artifacts = {} + for k, v in objid_artifacts.items(): + if k in par_obj_objects: + par_obj_id = par_obj_objects[k] + + # check for artifact parent in files, images, reports + path = '' + for artifact_parent_dict in [guid_files, guid_image_names, guid_reports]: + if par_obj_id in artifact_parent_dict: + path = artifact_parent_dict[par_obj_id] + break + + guid_artifacts[k] = "/".join([path, v]) + + return 
TskGuidUtils( + # aggregate all the object id dictionaries together + obj_id_guids={**guid_files, **guid_reports, **guid_os_accounts, **guid_vs_parts, **guid_vs_info, + **guid_fs_info, **guid_fs_info, **guid_image_names, **guid_artifacts}, + artifact_types=artifact_objid_artifacts) + + artifact_types: Dict[int, str] + obj_id_guids: Dict[int, any] + + def __init__(self, obj_id_guids: Dict[int, any], artifact_types: Dict[int, str]): + """ + Main constructor. + Args: + obj_id_guids: A dictionary mapping object ids to their guids. + artifact_types: A dictionary mapping artifact ids to their types. + """ + self.artifact_types = artifact_types + self.obj_id_guids = obj_id_guids + + def get_guid_for_objid(self, obj_id, omitted_value: Union[str, None] = 'Object ID Omitted'): + """ + Returns the guid for the specified object id or returns omitted value if the object id is not found. + Args: + obj_id: The object id. + omitted_value: The value if no object id mapping is found. + + Returns: The relevant guid or the omitted_value. + + """ + return self.obj_id_guids[obj_id] if obj_id in self.obj_id_guids else omitted_value + + def get_guid_for_file_objid(self, obj_id, omitted_value: Union[str, None] = 'Object ID Omitted'): + # this method is just an alias for get_guid_for_objid + return self.get_guid_for_objid(obj_id, omitted_value) + + def get_guid_for_accountid(self, account_id, omitted_value: Union[str, None] = 'Account ID Omitted'): + # this method is just an alias for get_guid_for_objid + return self.get_guid_for_objid(account_id, omitted_value) + + def get_guid_for_artifactid(self, artifact_id, omitted_value: Union[str, None] = 'Artifact ID Omitted'): + """ + Returns the guid for the specified artifact id or returns omitted value if the artifact id is not found. + Args: + artifact_id: The artifact id. + omitted_value: The value if no object id mapping is found. + + Returns: The relevant guid or the omitted_value. 
+ """ + return self.artifact_types[artifact_id] if artifact_id in self.artifact_types else omitted_value + + +class NormalizeRow: + """ + Given a dictionary representing a row (i.e. column name mapped to value), returns a normalized representation of + that row such that the values should be less volatile from run to run. + """ + row_masker: Callable[[TskGuidUtils, Dict[str, any]], Dict[str, any]] + + def __init__(self, row_masker: Callable[[TskGuidUtils, Dict[str, any]], Union[Dict[str, any], None]]): + """ + Main constructor. + Args: + row_masker: The function to be called to mask the specified row. + """ + self.row_masker = row_masker + + def normalize(self, guid_util: TskGuidUtils, row: Dict[str, any]) -> Union[Dict[str, any], None]: + """ + Normalizes a row such that the values should be less volatile from run to run. + Args: + guid_util: The TskGuidUtils instance providing guids for volatile ids. + row: The row values mapping column name to value. + + Returns: The normalized row or None if the row should be ignored. + + """ + return self.row_masker(guid_util, row) + + +class NormalizeColumns(NormalizeRow): + """ + Utility for normalizing specific column values of a row so they are not volatile values that will change from run + to run. + """ + + @classmethod + def _normalize_col_vals(cls, + col_mask: Dict[str, Union[any, Callable[[TskGuidUtils, any], any]]], + guid_util: TskGuidUtils, + row: Dict[str, any]): + """ + Normalizes column values for each column rule provided. + Args: + col_mask: A dictionary mapping columns to either the replacement value or a function to retrieve the + replacement value given the TskGuidUtils instance and original value as arguments. + guid_util: The TskGuidUtil used to provide guids for volatile values. + row: The dictionary representing the row mapping column names to values. + + Returns: The new row representation. 
+ + """ + row_copy = row.copy() + for key, val in col_mask.items(): + # only replace values if present in row + if key in row_copy: + # if a column replacing function, call with original value + if isinstance(val, Callable): + row_copy[key] = val(guid_util, row[key]) + # otherwise, just replace with mask value + else: + row_copy[key] = val + + return row_copy + + def __init__(self, col_mask: Dict[str, Union[any, Callable[[any], any]]]): + super().__init__(lambda guid_util, row: NormalizeColumns._normalize_col_vals(col_mask, guid_util, row)) + + +def get_path_segs(path: Union[str, None]) -> Union[List[str], None]: + """ + Breaks a path string into its folders and filenames. + Args: + path: The path string or None. + + Returns: The path segments or None. + + """ + if path: + # split on backslash or forward slash + return list(filter(lambda x: len(x.strip()) > 0, [s for s in re.split(r"[\\/]", path)])) + else: + return None + + +def index_of(lst, search_item) -> int: + """ + Returns the index of the item in the list or -1. + Args: + lst: The list. + search_item: The item to search for. + + Returns: The index in the list of the item or -1. + + """ + for idx, item in enumerate(lst): + if item == search_item: + return idx + + return -1 + + +def get_sql_insert_value(val) -> str: + """ + Returns the value that would appear in a sql insert statement (i.e. string becomes 'string', None becomes NULL) + Args: + val: The original value. + + Returns: The sql insert equivalent value. + + """ + if val is None: + return "NULL" + + if isinstance(val, str): + escaped_val = val.replace('\n', '\\n').replace("'", "''") + return f"'{escaped_val}'" + + return str(val) + + +def get_sqlite_table_columns(conn) -> Dict[str, List[str]]: + """ + Retrieves a dictionary mapping table names to a list of all the columns for that table + where the columns are in ordinal value. + Args: + conn: The database connection. 
+ + Returns: A dictionary of the form { table_name: [col_name1, col_name2...col_nameN] } + + """ + cur = conn.cursor() + cur.execute("SELECT name FROM sqlite_master tables WHERE tables.type='table'") + tables = list([table[0] for table in cur.fetchall()]) + cur.close() + + to_ret = {} + for table in tables: + cur = conn.cursor() + cur.execute('SELECT name FROM pragma_table_info(?) ORDER BY cid', [table]) + to_ret[table] = list([col[0] for col in cur.fetchall()]) + + return to_ret + + +def get_pg_table_columns(conn) -> Dict[str, List[str]]: + """ + Returns a dictionary mapping table names to the list of their columns in ordinal order. + Args: + conn: The pg database connection. + + Returns: The dictionary of tables mapped to a list of their ordinal-orderd column names. + """ + cursor = conn.cursor() + cursor.execute(""" + SELECT cols.table_name, cols.column_name + FROM information_schema.columns cols + WHERE cols.column_name IS NOT NULL + AND cols.table_name IS NOT NULL + AND cols.table_name IN ( + SELECT tables.tablename FROM pg_catalog.pg_tables tables + WHERE LOWER(schemaname) = 'public' + ) + ORDER by cols.table_name, cols.ordinal_position; + """) + mapping = {} + for row in cursor: + mapping.setdefault(row[0], []).append(row[1]) + + cursor.close() + return mapping + + +def sanitize_schema(original: str) -> str: + """ + Sanitizes sql script representing table/index creations. + Args: + original: The original sql schema creation script. + + Returns: The sanitized schema. 
+ """ + sanitized_lines = [] + dump_line = '' + for line in original.splitlines(): + line = line.strip('\r\n ') + lower_line = line.lower() + # It's comment or alter statement or catalog entry or set idle entry or empty line + if (not line or + line.startswith('--') or + lower_line.startswith('set') or + " set default nextval" in lower_line or + " owner to " in lower_line or + " owned by " in lower_line or + "pg_catalog" in lower_line or + "idle_in_transaction_session_timeout" in lower_line): + continue + + # if there is no white space or parenthesis delimiter, add a space + if re.match(r'^.+?[^\s()]$', dump_line) and re.match(r'^[^\s()]', line): + dump_line += ' ' + + # append the line to the outputted line + dump_line += line + + # if line ends with ';' then this will be one statement in diff + if line.endswith(';'): + sanitized_lines.append(dump_line) + dump_line = '' + + if len(dump_line.strip()) > 0: + sanitized_lines.append(dump_line) + + return "\n".join(sanitized_lines) + + +def get_pg_schema(dbname: str, pg_username: str, pg_pword: str, pg_host: str, pg_port: Union[str, int]): + """ + Gets the schema to be added to the dump text from the postgres database. + Args: + dbname: The name of the database. + pg_username: The postgres user name. + pg_pword: The postgres password. + pg_host: The postgres host. + pg_port: The postgres port. + + Returns: The normalized schema. + + """ + os.environ['PGPASSWORD'] = pg_pword + pg_dump = ["pg_dump", "-U", pg_username, "-h", pg_host, "-p", str(pg_port), + "--schema-only", "-d", dbname, "-t", "public.*"] + output = subprocess.check_output(pg_dump) + output_str = output.decode('UTF-8') + return sanitize_schema(output_str) + + +def get_sqlite_schema(db_conn): + """ + Gets the schema to be added to the dump text from the sqlite database. + Args: + db_conn: The database connection. + + Returns: The normalized schema. 
+ + """ + cursor = db_conn.cursor() + query = "SELECT sql FROM sqlite_master " \ + "WHERE type IN ('table', 'index') AND sql IS NOT NULL " \ + "ORDER BY type DESC, tbl_name ASC" + + cursor.execute(query) + schema = '\n'.join([str(row[0]) + ';' for row in cursor]) + return sanitize_schema(schema) + + +def _mask_event_desc(desc: str) -> str: + """ + Masks dynamic event descriptions of the form ":" so the artifact id is no longer + present. + Args: + desc: The original description. + + Returns: The normalized description. + + """ + + # Takes a string like "Shell Bags: 30840" and replaces with "ShellBags:" + match = re.search(r"^\s*(.+?)\s*:\s*\d+\s*$", desc.strip()) + if match: + return f"{match.group(1)}:" + + return desc + + +def normalize_tsk_event_descriptions(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: + """ + Normalizes event description rows masking possibly changing column values. + Args: + guid_util: Provides guids for ids that may change from run to run. + row: A dictionary mapping column names to values. + + Returns: The normalized event description row. + """ + row_copy = row.copy() + # replace object ids with information that is deterministic + row_copy['event_description_id'] = MASKED_ID + row_copy['content_obj_id'] = guid_util.get_guid_for_file_objid(row['content_obj_id']) + row_copy['artifact_id'] = guid_util.get_guid_for_artifactid(row['artifact_id']) if row['artifact_id'] else None + + if row['full_description'] == row['med_description'] == row['short_description']: + row_copy['full_description'] = _mask_event_desc(row['full_description']) + row_copy['med_description'] = _mask_event_desc(row['med_description']) + row_copy['short_description'] = _mask_event_desc(row['short_description']) + + return row_copy + + +def normalize_ingest_jobs(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: + """ + Normalizes ingest jobs table rows. + Args: + guid_util: Provides guids for ids that may change from run to run. 
+ row: A dictionary mapping column names to values. + + Returns: The normalized ingest job row. + + """ + row_copy = row.copy() + row_copy['host_name'] = "{host_name}" + + start_time = row['start_date_time'] + end_time = row['end_date_time'] + if start_time <= end_time: + row_copy['start_date_time'] = 0 + row_copy['end_date_time'] = 0 + + return row_copy + + +def normalize_unalloc_files(path_str: Union[str, None]) -> Union[str, None]: + """ + Normalizes a path string removing timestamps from unalloc files. + Args: + path_str: The original path string. + + Returns: The path string where timestamps are removed from unalloc strings. + + """ + + # takes a file name like "Unalloc_30580_7466496_2980941312" and removes the object id to become + # "Unalloc_7466496_2980941312" + return None if path_str is None else re.sub('Unalloc_[0-9]+_', 'Unalloc_', path_str) + + +def normalize_regripper_files(path_str: Union[str, None]) -> Union[str, None]: + """ + Normalizes a path string removing timestamps from regripper files. + Args: + path_str: The original path string. + + Returns: The path string where timestamps are removed from regripper paths. + + """ + # takes a file name like "regripper-12345-full" and removes the id to become "regripper-full" + return None if path_str is None else re.sub(r'regripper-[0-9]+-full', 'regripper-full', path_str) + + +def normalize_tsk_files(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: + """ + Normalizes files table rows. + Args: + guid_util: Provides guids for ids that may change from run to run. + row: A dictionary mapping column names to values. + + Returns: The normalized files table row. + + """ + # Ignore TIFF size and hash if extracted from PDFs. + # See JIRA-6951 for more details. 
+ row_copy = row.copy() + if row['extension'] is not None and row['extension'].strip().lower() == 'tif' and \ + row['parent_path'] is not None and row['parent_path'].strip().lower().endswith('.pdf/'): + row_copy['size'] = "SIZE_IGNORED" + row_copy['md5'] = "MD5_IGNORED" + row_copy['sha256'] = "SHA256_IGNORED" + + row_copy['obj_id'] = MASKED_OBJ_ID + row_copy['os_account_obj_id'] = 'MASKED_OS_ACCOUNT_OBJ_ID' + row_copy['parent_path'] = normalize_unalloc_files(row['parent_path']) + row_copy['name'] = normalize_unalloc_files(row['name']) + return row_copy + + +def normalize_tsk_files_path(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: + """ + Normalizes file path table rows. + Args: + guid_util: Provides guids for ids that may change from run to run. + row: A dictionary mapping column names to values. + + Returns: The normalized file path table row. + """ + row_copy = row.copy() + path = row['path'] + if path is not None: + path_parts = get_path_segs(path) + module_output_idx = index_of(path_parts, 'ModuleOutput') + if module_output_idx >= 0: + # remove everything up to and including ModuleOutput if ModuleOutput present + path_parts = path_parts[module_output_idx:] + if len(path_parts) > 1 and path_parts[1] == 'Embedded File Extractor': + # Takes a folder like ModuleOutput\Embedded File Extractor/f_000168_4435\f_000168 + # and fixes the folder after 'Embedded File Extractor', 'f_000168_4435' to remove the last number + # to become 'f_000168' + match = re.match(r'^(.+?)_\d*$', path_parts[2]) + if match: + path_parts[2] = match.group(1) + + row_copy['path'] = os.path.join(*path_parts) if len(path_parts) > 0 else '/' + + row_copy['obj_id'] = guid_util.get_guid_for_file_objid(row['obj_id']) + return row_copy + + +def normalize_tsk_objects_path(guid_util: TskGuidUtils, objid: int, + no_path_placeholder: Union[str, None]) -> Union[str, None]: + """ + Returns a normalized path to be used in a tsk_objects table row. 
+ Args: + guid_util: The utility for fetching guids. + objid: The object id of the item. + no_path_placeholder: text to return if no path value found. + + Returns: The 'no_path_placeholder' text if no path. Otherwise, the normalized path. + + """ + path = guid_util.get_guid_for_objid(objid, omitted_value=None) + + if path is None: + return no_path_placeholder + else: # remove host name (for multi-user) and dates/times from path for reports - if path is not None: - if 'ModuleOutput' in path: - # skip past the host name (if any) - path = path[path.find('ModuleOutput'):] - if 'BulkExtractor' in path or 'Smirk' in path: - # chop off the last folder (which contains a date/time) - path = path[:path.rfind('\\')] - if 'Reports\\AutopsyTestCase HTML Report' in path: - path = 'Reports\\AutopsyTestCase HTML Report' + path_parts = get_path_segs(path) + module_output_idx = index_of(path_parts, 'ModuleOutput') + if module_output_idx >= 0: + # remove everything up to and including ModuleOutput if ModuleOutput present + path_parts = path_parts[module_output_idx:] - if parent_id in files_table.keys(): - parent_path = files_table[parent_id] - elif parent_id in vs_parts_table.keys(): - parent_path = vs_parts_table[parent_id] - elif parent_id in vs_info_table.keys(): - parent_path = vs_info_table[parent_id] - elif parent_id in fs_info_table.keys(): - parent_path = fs_info_table[parent_id] - elif parent_id in images_table.keys(): - parent_path = images_table[parent_id] - elif parent_id in accounts_table.keys(): - parent_path = accounts_table[parent_id] - elif parent_id == 'NULL': - parent_path = "NULL" - - # Remove host name (for multi-user) from parent_path - if parent_path is not None: - if 'ModuleOutput' in parent_path: - # skip past the host name (if any) - parent_path = parent_path[parent_path.find('ModuleOutput'):] + if "BulkExtractor" in path_parts or "Smirk" in path_parts: + # chop off the last folder (which contains a date/time) + path_parts = path_parts[:-1] - if path and 
parent_path: - # Remove object ID from Unalloc file names and regripper output - path = re.sub('Unalloc_[0-9]+_', 'Unalloc_', path) - path = re.sub('regripper\-[0-9]+\-full', 'regripper-full', path) - parent_path = re.sub('Unalloc_[0-9]+_', 'Unalloc_', parent_path) - parent_path = re.sub('regripper\-[0-9]+\-full', 'regripper-full', parent_path) - return newLine + path + ', ' + parent_path + ', ' + ', '.join(fields_list[2:]) + ');' - else: - return newLine + '"OBJECT IDS OMITTED", ' + ', '.join(fields_list[2:]) + ');' #omit parent object id and object id when we cant annonymize them - # remove time-based information, ie Test_6/11/14 -> Test - elif report_index: - fields_list[1] = "AutopsyTestCase" - fields_list[2] = "0" - newLine = ('INSERT INTO "reports" VALUES(' + ','.join(fields_list[1:]) + ');') # remove report_id - return newLine - elif data_source_info_index: - fields_list[1] = "{device id}" - fields_list[4] = "{dateTime}" - newLine = ('INSERT INTO "data_source_info" VALUES(' + ','.join(fields_list) + ');') - return newLine - elif ingest_job_index: - fields_list[2] = "{host_name}" - start_time = int(fields_list[3]) - end_time = int(fields_list[4]) - if (start_time <= end_time): - fields_list[3] = "0" - fields_list[4] = "0" - newLine = ('INSERT INTO "ingest_jobs" VALUES(' + ','.join(fields_list) + ');') - return newLine - elif examiners_index: - fields_list[1] = "{examiner_name}" - newLine = ('INSERT INTO "tsk_examiners" VALUES(' + ','.join(fields_list) + ');') - return newLine - # remove all timing dependent columns from events table - elif events_index: - newLine = ('INSERT INTO "tsk_events" VALUES(' + ','.join(fields_list[1:2]) + ');') - return newLine - # remove object ids from event description table - elif event_description_index: - # replace object ids with information that is deterministic - file_obj_id = int(fields_list[5]) - object_id = int(fields_list[4]) - legacy_artifact_id = 'NULL' - if (fields_list[6] != 'NULL'): - legacy_artifact_id = 
int(fields_list[6]) - if file_obj_id != 'NULL' and file_obj_id in files_table.keys(): - fields_list[5] = files_table[file_obj_id] - if object_id != 'NULL' and object_id in files_table.keys(): - fields_list[4] = files_table[object_id] - if legacy_artifact_id != 'NULL' and legacy_artifact_id in artifact_table.keys(): - fields_list[6] = artifact_table[legacy_artifact_id] - if fields_list[1] == fields_list[2] and fields_list[1] == fields_list[3]: - fields_list[1] = cleanupEventDescription(fields_list[1]) - fields_list[2] = cleanupEventDescription(fields_list[2]) - fields_list[3] = cleanupEventDescription(fields_list[3]) - newLine = ('INSERT INTO "tsk_event_descriptions" VALUES(' + ','.join(fields_list[1:]) + ');') # remove report_id - return newLine - elif os_account_index: - newLine = ('INSERT INTO "tsk_os_accounts" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id since value that would be substituted is in diff line already - return newLine - elif os_account_attr_index: - #substitue the account object id for a non changing value - os_account_id = int(fields_list[1]) - fields_list[1] = accounts_table[os_account_id] - #substitue the source object id for a non changing value - source_obj_id = int(fields_list[3]) - if source_obj_id in files_table.keys(): - fields_list[3] = files_table[source_obj_id] - elif source_obj_id in vs_parts_table.keys(): - fields_list[3] = vs_parts_table[source_obj_id] - elif source_obj_id in vs_info_table.keys(): - fields_list[3] = vs_info_table[source_obj_id] - elif source_obj_id in fs_info_table.keys(): - fields_list[3] = fs_info_table[source_obj_id] - elif source_obj_id in images_table.keys(): - fields_list[3] = images_table[source_obj_id] - elif source_obj_id in accounts_table.keys(): - fields_list[3] = accounts_table[source_obj_id] - elif source_obj_id == 'NULL': - fields_list[3] = "NULL" - newLine = ('INSERT INTO "tsk_os_account_attributes" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id - return newLine - else: - return 
line - -def cleanupEventDescription(description): - test = re.search("^'\D+:\d+'$", description) - if test is not None: - return re.sub(":\d+", ":", description) - else: - return description + if path_parts and len(path_parts) >= 2: + for idx in range(0, len(path_parts) - 1): + if path_parts[idx].lower() == "reports" and \ + path_parts[idx + 1].lower().startswith("autopsytestcase html report"): + path_parts = ["Reports", "AutopsyTestCase HTML Report"] + break -def getAssociatedArtifactType(cur, artifact_id, isMultiUser): - if isMultiUser: - cur.execute("SELECT tsk_files.parent_path, blackboard_artifact_types.display_name FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id WHERE artifact_id=%s",[artifact_id]) - else: - cur.execute("SELECT tsk_files.parent_path, blackboard_artifact_types.display_name FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id WHERE artifact_id=?",[artifact_id]) + path = os.path.join(*path_parts) if len(path_parts) > 0 else '/' - info = cur.fetchone() - - return "File path: " + info[0] + " Artifact Type: " + info[1] + return normalize_regripper_files(normalize_unalloc_files(path)) -def build_id_files_table(db_cursor, isPostgreSQL): - """Build the map of object ids to file paths. 
- Args: - db_cursor: the database cursor +def normalize_tsk_objects(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: """ - # for each row in the db, take the object id, parent path, and name, then create a tuple in the dictionary - # with the object id as the key and the full file path (parent + name) as the value - mapping = dict([(row[0], str(row[1]) + str(row[2])) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, parent_path, name FROM tsk_files")]) - return mapping - -def build_id_vs_parts_table(db_cursor, isPostgreSQL): - """Build the map of object ids to vs_parts. - + Normalizes object table rows. Args: - db_cursor: the database cursor + guid_util: Provides guids for ids that may change from run to run. + row: A dictionary mapping column names to values. + + Returns: The normalized object table row. """ - # for each row in the db, take the object id, addr, and start, then create a tuple in the dictionary - # with the object id as the key and (addr + start) as the value - mapping = dict([(row[0], str(row[1]) + '_' + str(row[2])) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, addr, start FROM tsk_vs_parts")]) - return mapping + row_copy = row.copy() + row_copy['obj_id'] = None if row['obj_id'] is None else \ + normalize_tsk_objects_path(guid_util, row['obj_id'], MASKED_OBJ_ID) -def build_id_vs_info_table(db_cursor, isPostgreSQL): - """Build the map of object ids to vs_info. + row_copy['par_obj_id'] = None if row['par_obj_id'] is None else \ + normalize_tsk_objects_path(guid_util, row['par_obj_id'], 'MASKED_PARENT_OBJ_ID') + return row_copy + + +MASKED_OBJ_ID = "MASKED_OBJ_ID" +MASKED_ID = "MASKED_ID" + +IGNORE_TABLE = "IGNORE_TABLE" + +TableNormalization = Union[IGNORE_TABLE, NormalizeRow] + +""" +This dictionary maps tables where data should be specially handled to how they should be handled. 
+""" +TABLE_NORMALIZATIONS: Dict[str, TableNormalization] = { + "image_gallery_groups_seen": IGNORE_TABLE, + "blackboard_artifacts": IGNORE_TABLE, + "blackboard_attributes": IGNORE_TABLE, + "tsk_files": NormalizeRow(normalize_tsk_files), + "tsk_vs_parts": NormalizeColumns({ + "obj_id": MASKED_OBJ_ID + }), + "image_gallery_groups": NormalizeColumns({ + "group_id": MASKED_ID + }), + "tsk_files_path": NormalizeRow(normalize_tsk_files_path), + "tsk_file_layout": NormalizeColumns({ + "obj_id": lambda guid_util, col: normalize_unalloc_files(guid_util.get_guid_for_file_objid(col)) + }), + "tsk_objects": NormalizeRow(normalize_tsk_objects), + "reports": NormalizeColumns({ + "obj_id": MASKED_OBJ_ID, + "path": "AutopsyTestCase", + "crtime": 0 + }), + "data_source_info": NormalizeColumns({ + "device_id": "{device id}", + "added_date_time": "{dateTime}" + }), + "ingest_jobs": NormalizeRow(normalize_ingest_jobs), + "tsk_examiners": NormalizeColumns({ + "login_name": "{examiner_name}" + }), + "tsk_events": NormalizeColumns({ + "event_id": "MASKED_EVENT_ID", + "event_description_id": None, + "time": None, + }), + "tsk_event_descriptions": NormalizeRow(normalize_tsk_event_descriptions), + "tsk_os_accounts": NormalizeColumns({ + "os_account_obj_id": MASKED_OBJ_ID + }), + "tsk_os_account_attributes": NormalizeColumns({ + "id": MASKED_ID, + "os_account_obj_id": lambda guid_util, col: guid_util.get_guid_for_accountid(col), + "source_obj_id": lambda guid_util, col: guid_util.get_guid_for_objid(col) + }), + "tsk_os_account_instances": NormalizeColumns({ + "id": MASKED_ID, + "os_account_obj_id": lambda guid_util, col: guid_util.get_guid_for_accountid(col) + }), + "tsk_data_artifacts": NormalizeColumns({ + "artifact_obj_id": + lambda guid_util, col: guid_util.get_guid_for_file_objid(col, omitted_value="Artifact Object ID Omitted"), + "os_account_obj_id": + lambda guid_util, col: guid_util.get_guid_for_file_objid(col, omitted_value="Account Object ID Omitted"), + }) +} + + +def 
write_normalized(guid_utils: TskGuidUtils, output_file, db_conn, table: str, column_names: List[str], + normalizer: Union[TableNormalization, None] = None): + """ + Outputs rows of a file as their normalized values (where values should not change from run to run). Args: - db_cursor: the database cursor + guid_utils: Provides guids to replace values that would potentially change from run to run. + output_file: The file where the normalized dump will be written. + db_conn: The database connection. + table: The name of the table. + column_names: The name of the columns in the table in ordinal order. + normalizer: The normalizer (if any) to use so that data is properly normalized. """ - # for each row in the db, take the object id, vs_type, and img_offset, then create a tuple in the dictionary - # with the object id as the key and (vs_type + img_offset) as the value - mapping = dict([(row[0], str(row[1]) + '_' + str(row[2])) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, vs_type, img_offset FROM tsk_vs_info")]) - return mapping + if normalizer == IGNORE_TABLE: + return - -def build_id_fs_info_table(db_cursor, isPostgreSQL): - """Build the map of object ids to fs_info. 
+ cursor = db_conn.cursor() - Args: - db_cursor: the database cursor - """ - # for each row in the db, take the object id, img_offset, and fs_type, then create a tuple in the dictionary - # with the object id as the key and (img_offset + fs_type) as the value - mapping = dict([(row[0], str(row[1]) + '_' + str(row[2])) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, img_offset, fs_type FROM tsk_fs_info")]) - return mapping + joined_columns = ",".join([col for col in column_names]) + cursor.execute(f"SELECT {joined_columns} FROM {table}") + for row in cursor: + if len(row) != len(column_names): + print( + f"ERROR: in {table}, number of columns retrieved: {len(row)} but columns are" + f" {len(column_names)} with {str(column_names)}") + continue -def build_id_objects_table(db_cursor, isPostgreSQL): - """Build the map of object ids to par_id. + row_dict = {} + for col_idx in range(0, len(column_names)): + row_dict[column_names[col_idx]] = row[col_idx] - Args: - db_cursor: the database cursor - """ - # for each row in the db, take the object id, par_obj_id, then create a tuple in the dictionary - # with the object id as the key and par_obj_id, type as the value - mapping = dict([(row[0], [row[1], row[2]]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT * FROM tsk_objects")]) - return mapping + if normalizer and isinstance(normalizer, NormalizeRow): + row_masker: NormalizeRow = normalizer + row_dict = row_masker.normalize(guid_utils, row_dict) -def build_id_image_names_table(db_cursor, isPostgreSQL): - """Build the map of object ids to name. 
+ if row_dict is not None: + # show row as json-like value + entries = [] + for column in column_names: + value = get_sql_insert_value(row_dict[column] if column in row_dict and row_dict[column] else None) + if value is not None: + entries.append((column, value)) + insert_values = ", ".join([f"{pr[0]}: {pr[1]}" for pr in entries]) + insert_statement = f"{table}: {{{insert_values}}}\n" + output_file.write(insert_statement) - Args: - db_cursor: the database cursor - """ - # for each row in the db, take the object id and name then create a tuple in the dictionary - # with the object id as the key and name, type as the value - mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, name FROM tsk_image_names WHERE sequence=0")]) - #data_sources which are logical file sets will be found in the files table - return mapping -def build_id_artifact_types_table(db_cursor, isPostgreSQL): - """Build the map of object ids to artifact ids. - - Args: - db_cursor: the database cursor - """ - # for each row in the db, take the object id, par_obj_id, then create a tuple in the dictionary - # with the object id as the key and artifact type as the value - mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT blackboard_artifacts.artifact_obj_id, blackboard_artifact_types.type_name FROM blackboard_artifacts INNER JOIN blackboard_artifact_types ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id ")]) - return mapping - -def build_id_legacy_artifact_types_table(db_cursor, isPostgreSQL): - """Build the map of legacy artifact ids to artifact type. 
- - Args: - db_cursor: the database cursor - """ - # for each row in the db, take the legacy artifact id then create a tuple in the dictionary - # with the artifact id as the key and artifact type as the value - mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT blackboard_artifacts.artifact_id, blackboard_artifact_types.type_name FROM blackboard_artifacts INNER JOIN blackboard_artifact_types ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id ")]) - return mapping - -def build_id_reports_table(db_cursor, isPostgreSQL): - """Build the map of report object ids to report path. - - Args: - db_cursor: the database cursor - """ - # for each row in the reports table in the db, create an obj_id -> path map - mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, path FROM reports")]) - return mapping - -def build_id_accounts_table(db_cursor, isPostgreSQL): - """Build the map of object ids to OS account SIDs. - - Args: - db_cursor: the database cursor - """ - # for each row in the db, take the object id and account SID then creates a tuple in the dictionary - # with the object id as the key and the OS Account's SID as the value - mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, unique_id FROM tsk_os_accounts")]) - return mapping - -def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table, accounts_table): - """Build the map of object ids to artifact ids. 
- - Args: - files_table: obj_id, path - objects_table: obj_id, par_obj_id, type - artifacts_table: obj_id, artifact_type_name - reports_table: obj_id, path - images_table: obj_id, name - accounts_table: obj_id, unique_id - """ - # make a copy of files_table and update it with new data from artifacts_table and reports_table - mapping = files_table.copy() - for k, v in objects_table.items(): - path = "" - if k not in mapping.keys(): # If the mapping table doesn't have data for obj_id - if k in reports_table.keys(): # For a report we use the report path - par_obj_id = v[0] - if par_obj_id is not None: - mapping[k] = reports_table[k] - elif k in artifacts_table.keys(): # For an artifact we use it's par_obj_id's path+name plus it's artifact_type name - par_obj_id = v[0] # The parent of an artifact can be a file or a report - if par_obj_id in mapping.keys(): - path = mapping[par_obj_id] - elif par_obj_id in reports_table.keys(): - path = reports_table[par_obj_id] - elif par_obj_id in images_table.keys(): - path = images_table[par_obj_id] - mapping[k] = path + "/" + artifacts_table[k] - elif k in accounts_table.keys(): # For an OS Account object ID we use its unique_id field which is the account SID - mapping[k] = accounts_table[k] - elif v[0] not in mapping.keys(): - if v[0] in artifacts_table.keys(): - par_obj_id = objects_table[v[0]] - path = mapping[par_obj_id] - mapping[k] = path + "/" + artifacts_table[v[0]] - return mapping - -def db_connect(db_file, isMultiUser, pgSettings=None): - if isMultiUser: # use PostgreSQL +def db_connect(db_file, is_multi_user, pg_settings=None): + if is_multi_user: # use PostgreSQL try: - return psycopg2.connect("dbname=" + db_file + " user=" + pgSettings.username + " host=" + pgSettings.pgHost + " password=" + pgSettings.password), None + return psycopg2.connect("dbname=" + db_file + " user=" + pg_settings.username + " host=" + + pg_settings.pgHost + " password=" + pg_settings.password), None except: print("Failed to connect to the 
database: " + db_file) - else: # Sqlite + else: # Sqlite # Make a copy that we can modify backup_db_file = TskDbDiff._get_tmp_file("tsk_backup_db", ".db") shutil.copy(db_file, backup_db_file) # We sometimes get situations with messed up permissions - os.chmod (backup_db_file, 0o777) + os.chmod(backup_db_file, 0o777) return sqlite3.connect(backup_db_file), backup_db_file -def sql_select_execute(cursor, isPostgreSQL, sql_stmt): - if isPostgreSQL: - cursor.execute(sql_stmt) - return cursor.fetchall() - else: - return cursor.execute(sql_stmt) def main(): try: @@ -863,7 +1134,7 @@ def main(): print("usage: tskdbdiff [OUTPUT DB PATH] [GOLD DB PATH]") sys.exit(1) - db_diff = TskDbDiff(output_db, gold_db, output_dir=".") + db_diff = TskDbDiff(output_db, gold_db, output_dir=".") dump_passed, bb_dump_passed = db_diff.run_diff() if dump_passed and bb_dump_passed: @@ -882,4 +1153,3 @@ if __name__ == "__main__": sys.exit(1) main() - diff --git a/thirdparty/DomainCategorization/README.txt b/thirdparty/DomainCategorization/README.txt new file mode 100644 index 0000000000..4dfe31b7ac --- /dev/null +++ b/thirdparty/DomainCategorization/README.txt @@ -0,0 +1 @@ +This is a folder containing information for web domain categorization and custom categories. \ No newline at end of file diff --git a/thirdparty/NetbeansLocalization/README.txt b/thirdparty/NetbeansLocalization/README.txt new file mode 100644 index 0000000000..c5751c99f0 --- /dev/null +++ b/thirdparty/NetbeansLocalization/README.txt @@ -0,0 +1 @@ +This contains jars provided in Netbeans 8 RCP that provide localization bundles. They do not appear to be included in Netbeans >= 9. See Jira 7434 for more information. 
diff --git a/thirdparty/NetbeansLocalization/org-jdesktop-layout_ja.jar b/thirdparty/NetbeansLocalization/org-jdesktop-layout_ja.jar new file mode 100644 index 0000000000..070f7febd5 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-jdesktop-layout_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-annotations-common_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-annotations-common_ja.jar new file mode 100644 index 0000000000..edb9a78b04 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-annotations-common_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-htmlui_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-htmlui_ja.jar new file mode 100644 index 0000000000..935bd84100 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-htmlui_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-intent_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-intent_ja.jar new file mode 100644 index 0000000000..4f476868ae Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-intent_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-io_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-io_ja.jar new file mode 100644 index 0000000000..d115ccc52d Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-io_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-progress-compat8_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-progress-compat8_ja.jar new file mode 100644 index 0000000000..902ee4b55e Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-progress-compat8_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-progress-nb_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-progress-nb_ja.jar new file mode 100644 index 0000000000..3cd2c2c517 Binary files 
/dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-progress-nb_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-progress_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-progress_ja.jar new file mode 100644 index 0000000000..7ae7ceb95d Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-progress_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-search_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-search_ja.jar new file mode 100644 index 0000000000..a77ec100a2 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-search_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-templates_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-templates_ja.jar new file mode 100644 index 0000000000..a0db573a65 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-templates_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-api-visual_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-api-visual_ja.jar new file mode 100644 index 0000000000..305048fb13 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-api-visual_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-execution_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-execution_ja.jar new file mode 100644 index 0000000000..22e24e7de8 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-execution_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-io-ui_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-io-ui_ja.jar new file mode 100644 index 0000000000..2096ba4174 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-io-ui_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-multitabs_ja.jar 
b/thirdparty/NetbeansLocalization/org-netbeans-core-multitabs_ja.jar new file mode 100644 index 0000000000..a86f711f83 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-multitabs_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-multiview_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-multiview_ja.jar new file mode 100644 index 0000000000..bd6f67dff9 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-multiview_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-nativeaccess_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-nativeaccess_ja.jar new file mode 100644 index 0000000000..dc386bac99 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-nativeaccess_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-netigso_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-netigso_ja.jar new file mode 100644 index 0000000000..57429511f7 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-netigso_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-network_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-network_ja.jar new file mode 100644 index 0000000000..b7d257b02c Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-network_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-osgi_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-osgi_ja.jar new file mode 100644 index 0000000000..1fc1980df7 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-osgi_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-output2_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-output2_ja.jar new file mode 100644 index 0000000000..a024cec8d8 Binary files /dev/null and 
b/thirdparty/NetbeansLocalization/org-netbeans-core-output2_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-ui_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-ui_ja.jar new file mode 100644 index 0000000000..583bed5055 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-ui_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core-windows_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core-windows_ja.jar new file mode 100644 index 0000000000..22ebd5d849 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core-windows_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-core_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-core_ja.jar new file mode 100644 index 0000000000..ee88e3d0b2 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-core_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-lib-uihandler_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-lib-uihandler_ja.jar new file mode 100644 index 0000000000..12964589fe Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-lib-uihandler_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-felix_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-felix_ja.jar new file mode 100644 index 0000000000..2237f74e64 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-felix_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-javafx_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-javafx_ja.jar new file mode 100644 index 0000000000..e27934543a Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-javafx_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-jna-platform_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-jna-platform_ja.jar new file mode 100644 index 
0000000000..bfb0273184 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-jna-platform_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-jna_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-jna_ja.jar new file mode 100644 index 0000000000..f6a70fdef9 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-jna_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-jsr223_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-jsr223_ja.jar new file mode 100644 index 0000000000..aaaeae7439 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-jsr223_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-junit4_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-junit4_ja.jar new file mode 100644 index 0000000000..d2504a6e01 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-junit4_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-osgi_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-osgi_ja.jar new file mode 100644 index 0000000000..46c049df50 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-osgi_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-libs-testng_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-libs-testng_ja.jar new file mode 100644 index 0000000000..5e6a42ced2 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-libs-testng_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-applemenu_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-applemenu_ja.jar new file mode 100644 index 0000000000..a87cab81b9 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-applemenu_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-cli_ja.jar 
b/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-cli_ja.jar new file mode 100644 index 0000000000..a485b6f5ef Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-cli_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-services_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-services_ja.jar new file mode 100644 index 0000000000..696c3cf0bc Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-services_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-ui_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-ui_ja.jar new file mode 100644 index 0000000000..2944325af7 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-autoupdate-ui_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-core-kit_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-core-kit_ja.jar new file mode 100644 index 0000000000..7fb1cf061c Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-core-kit_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-editor-mimelookup-impl_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-editor-mimelookup-impl_ja.jar new file mode 100644 index 0000000000..1bed7e0b38 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-editor-mimelookup-impl_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-editor-mimelookup_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-editor-mimelookup_ja.jar new file mode 100644 index 0000000000..ab36fa091a Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-editor-mimelookup_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-favorites_ja.jar 
b/thirdparty/NetbeansLocalization/org-netbeans-modules-favorites_ja.jar new file mode 100644 index 0000000000..b9829e54c0 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-favorites_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-javahelp_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-javahelp_ja.jar new file mode 100644 index 0000000000..e80bbd607a Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-javahelp_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-junitlib_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-junitlib_ja.jar new file mode 100644 index 0000000000..a9d8bce0ee Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-junitlib_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring-fallback_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring-fallback_ja.jar new file mode 100644 index 0000000000..6fa12e67da Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring-fallback_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring-impl_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring-impl_ja.jar new file mode 100644 index 0000000000..cdd341da23 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring-impl_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring_ja.jar new file mode 100644 index 0000000000..733a9b20e5 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-keyring_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-linux_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-linux_ja.jar new file mode 100644 index 
0000000000..4b493caa98 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-linux_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-macosx_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-macosx_ja.jar new file mode 100644 index 0000000000..2ffd662388 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-macosx_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-nio2_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-nio2_ja.jar new file mode 100644 index 0000000000..d1eb2762b9 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-nio2_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-ui_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-ui_ja.jar new file mode 100644 index 0000000000..9eae950a75 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-ui_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-windows_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-windows_ja.jar new file mode 100644 index 0000000000..b31b2bd76b Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs-windows_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs_ja.jar new file mode 100644 index 0000000000..fdbe8874ac Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-masterfs_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-netbinox_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-netbinox_ja.jar new file mode 100644 index 0000000000..81d02839f5 Binary files /dev/null and 
b/thirdparty/NetbeansLocalization/org-netbeans-modules-netbinox_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-options-api_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-options-api_ja.jar new file mode 100644 index 0000000000..d7fc6636a4 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-options-api_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-options-keymap_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-options-keymap_ja.jar new file mode 100644 index 0000000000..aa05ab9434 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-options-keymap_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-print_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-print_ja.jar new file mode 100644 index 0000000000..17333fcb69 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-print_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-progress-ui_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-progress-ui_ja.jar new file mode 100644 index 0000000000..71b4f6c62c Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-progress-ui_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-queries_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-queries_ja.jar new file mode 100644 index 0000000000..fb46c44bbf Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-queries_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-sampler_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-sampler_ja.jar new file mode 100644 index 0000000000..7b8d70e76a Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-sampler_ja.jar differ diff --git 
a/thirdparty/NetbeansLocalization/org-netbeans-modules-sendopts_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-sendopts_ja.jar new file mode 100644 index 0000000000..8bb8725873 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-sendopts_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-settings_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-settings_ja.jar new file mode 100644 index 0000000000..4edce51acd Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-settings_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-spi-actions_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-spi-actions_ja.jar new file mode 100644 index 0000000000..34340d9419 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-spi-actions_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-templates_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-templates_ja.jar new file mode 100644 index 0000000000..67dec8d299 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-templates_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-templatesui_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-templatesui_ja.jar new file mode 100644 index 0000000000..c58fabf436 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-templatesui_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-modules-uihandler_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-modules-uihandler_ja.jar new file mode 100644 index 0000000000..94401aa524 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-modules-uihandler_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-spi-quicksearch_ja.jar 
b/thirdparty/NetbeansLocalization/org-netbeans-spi-quicksearch_ja.jar new file mode 100644 index 0000000000..0356025fdf Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-spi-quicksearch_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-swing-outline_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-swing-outline_ja.jar new file mode 100644 index 0000000000..6fa52f1e3e Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-swing-outline_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-swing-plaf_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-swing-plaf_ja.jar new file mode 100644 index 0000000000..6007a388a6 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-swing-plaf_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-netbeans-swing-tabcontrol_ja.jar b/thirdparty/NetbeansLocalization/org-netbeans-swing-tabcontrol_ja.jar new file mode 100644 index 0000000000..75357b99f1 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-netbeans-swing-tabcontrol_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-actions_ja.jar b/thirdparty/NetbeansLocalization/org-openide-actions_ja.jar new file mode 100644 index 0000000000..f393db1170 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-actions_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-awt_ja.jar b/thirdparty/NetbeansLocalization/org-openide-awt_ja.jar new file mode 100644 index 0000000000..f35574dd86 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-awt_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-compat_ja.jar b/thirdparty/NetbeansLocalization/org-openide-compat_ja.jar new file mode 100644 index 0000000000..bba222edfa Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-compat_ja.jar differ diff --git 
a/thirdparty/NetbeansLocalization/org-openide-dialogs_ja.jar b/thirdparty/NetbeansLocalization/org-openide-dialogs_ja.jar new file mode 100644 index 0000000000..fdd75a84d5 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-dialogs_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-execution-compat8_ja.jar b/thirdparty/NetbeansLocalization/org-openide-execution-compat8_ja.jar new file mode 100644 index 0000000000..11c1df1ea1 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-execution-compat8_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-execution_ja.jar b/thirdparty/NetbeansLocalization/org-openide-execution_ja.jar new file mode 100644 index 0000000000..e1b84b35bc Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-execution_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-explorer_ja.jar b/thirdparty/NetbeansLocalization/org-openide-explorer_ja.jar new file mode 100644 index 0000000000..c8a06bbd70 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-explorer_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-filesystems-nb_ja.jar b/thirdparty/NetbeansLocalization/org-openide-filesystems-nb_ja.jar new file mode 100644 index 0000000000..55b02fc68a Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-filesystems-nb_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-io_ja.jar b/thirdparty/NetbeansLocalization/org-openide-io_ja.jar new file mode 100644 index 0000000000..50e19b9778 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-io_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-loaders_ja.jar b/thirdparty/NetbeansLocalization/org-openide-loaders_ja.jar new file mode 100644 index 0000000000..c06d6e1712 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-loaders_ja.jar differ diff --git 
a/thirdparty/NetbeansLocalization/org-openide-nodes_ja.jar b/thirdparty/NetbeansLocalization/org-openide-nodes_ja.jar new file mode 100644 index 0000000000..ed84a00886 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-nodes_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-options_ja.jar b/thirdparty/NetbeansLocalization/org-openide-options_ja.jar new file mode 100644 index 0000000000..b306b0a028 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-options_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-text_ja.jar b/thirdparty/NetbeansLocalization/org-openide-text_ja.jar new file mode 100644 index 0000000000..3e7e07b059 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-text_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-util-enumerations_ja.jar b/thirdparty/NetbeansLocalization/org-openide-util-enumerations_ja.jar new file mode 100644 index 0000000000..841b9d7551 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-util-enumerations_ja.jar differ diff --git a/thirdparty/NetbeansLocalization/org-openide-windows_ja.jar b/thirdparty/NetbeansLocalization/org-openide-windows_ja.jar new file mode 100644 index 0000000000..92fc28e2d8 Binary files /dev/null and b/thirdparty/NetbeansLocalization/org-openide-windows_ja.jar differ diff --git a/thirdparty/aLeapp/aleapp.exe b/thirdparty/aLeapp/aleapp.exe index 71106090c0..3f47725616 100644 Binary files a/thirdparty/aLeapp/aleapp.exe and b/thirdparty/aLeapp/aleapp.exe differ diff --git a/thirdparty/iLeapp/ileapp.exe b/thirdparty/iLeapp/ileapp.exe index 0456c3c726..f1d395e930 100644 Binary files a/thirdparty/iLeapp/ileapp.exe and b/thirdparty/iLeapp/ileapp.exe differ diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/PstParser.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/PstParser.java index 9cbc8c6a6f..358c67a8a5 100644 --- 
a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/PstParser.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/PstParser.java @@ -107,6 +107,11 @@ class PstParser implements AutoCloseable{ logger.log(Level.INFO, "Found encrypted PST file."); //NON-NLS return ParseResult.ENCRYPT; } + if (ex.getMessage().toLowerCase().startsWith("unable to")) { + logger.log(Level.WARNING, ex.getMessage()); + logger.log(Level.WARNING, String.format("Error in parsing PST file %s, file may be empty or corrupt", file.getName())); + return ParseResult.ERROR; + } String msg = file.getName() + ": Failed to create internal java-libpst PST file to parse:\n" + ex.getMessage(); //NON-NLS logger.log(Level.WARNING, msg, ex); return ParseResult.ERROR; diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java index 1c165ef535..f2e5a107b9 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java @@ -619,8 +619,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule { size, cTime, crTime, aTime, mTime, true, abstractFile, "", EmailParserModuleFactory.getModuleName(), EmailParserModuleFactory.getModuleVersion(), "", encodingType); - associateAttachmentWithMesssge(messageArtifact, df); - files.add(df); fileAttachments.add(new FileAttachment(df)); @@ -646,19 +644,6 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule { return files; } - /** - * Creates a TSK_ASSOCIATED_OBJECT artifact between the attachment file and - * the message artifact. 
- */ - private BlackboardArtifact associateAttachmentWithMesssge(BlackboardArtifact message, AbstractFile attachedFile) throws TskCoreException { - Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, EmailParserModuleFactory.getModuleName(), message.getArtifactID())); - - BlackboardArtifact bba = attachedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT); - bba.addAttributes(attributes); //write out to bb - return bba; - } - /** * Finds and returns a set of unique email addresses found in the input * string