diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/application/OtherOccurrences.java b/Core/src/org/sleuthkit/autopsy/centralrepository/application/OtherOccurrences.java index 155ca727d5..80e4ada388 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/application/OtherOccurrences.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/application/OtherOccurrences.java @@ -87,27 +87,10 @@ public final class OtherOccurrences { if (osAccountAddr.isPresent()) { try { - for (OsAccountInstance instance : osAccount.getOsAccountInstances()) { - DataSource osAccountDataSource = instance.getDataSource(); - try { - CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows()); - CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance( - CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID), - osAccountAddr.get(), - correlationCase, - CorrelationDataSource.fromTSKDataSource(correlationCase, instance.getDataSource()), - "", - "", - TskData.FileKnown.KNOWN, - osAccount.getId()); - + for (OsAccountInstance instance : osAccount.getOsAccountInstances()) { + CorrelationAttributeInstance correlationAttributeInstance = CorrelationAttributeUtil.makeCorrAttr(instance.getOsAccount(), instance.getDataSource()); + if (correlationAttributeInstance != null) { ret.add(correlationAttributeInstance); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", osAccountAddr.get()), ex); //NON-NLS - } catch (NoCurrentCaseException ex) { - logger.log(Level.WARNING, String.format("Exception while getting open case looking up osAccount %s.", osAccountAddr.get()), ex); //NON-NLS - } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.SEVERE, String.format("Exception with Correlation Attribute Normalization for osAccount %s.", osAccountAddr.get()), ex); 
//NON-NLS } } } catch (TskCoreException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java index 2e023f82c0..6d2ff51c01 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java @@ -40,6 +40,8 @@ import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.HashUtility; import org.sleuthkit.datamodel.InvalidAccountIDException; +import org.sleuthkit.datamodel.OsAccount; +import org.sleuthkit.datamodel.OsAccountInstance; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -568,6 +570,51 @@ } } + /** + * Makes a correlation attribute instance of a given type from an OS + * account. Checks address if it is null, or one of the ones always present + * on a windows system and thus not unique. + * + * @param osAccount The OS account. + * @param dataSource The data source content object. + * + * @return The correlation attribute instance or null, if an error occurred. 
+ */ + public static CorrelationAttributeInstance makeCorrAttr(OsAccount osAccount, Content dataSource) { + + Optional accountAddr = osAccount.getAddr(); + // Check address if it is null or one of the ones below we want to ignore it since they will always be on a Windows system + // and they are not unique + if (!accountAddr.isPresent() || accountAddr.get().equals("S-1-5-18") || accountAddr.get().equals("S-1-5-19") || accountAddr.get().equals("S-1-5-20")) { + return null; + } + try { + + CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows()); + CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance( + CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID), + accountAddr.get(), + correlationCase, + CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource), + "", + "", + TskData.FileKnown.KNOWN, + osAccount.getId()); + + return correlationAttributeInstance; + + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS + return null; + } catch (NoCurrentCaseException ex) { + logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS + return null; + } catch (CorrelationAttributeNormalizationException ex) { + logger.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS + return null; + } + } + /** * Gets the correlation attribute instance for a file. 
* diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java index a3ccca4ccf..474ac1cd85 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -661,7 +661,8 @@ public final class CaseEventListener implements PropertyChangeListener { "CaseEventsListener.prevCaseComment.text=Users seen in previous cases", "CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"}) /** - * Add OsAccount Instance to CR and find interesting items based on the OsAccount + * Add OsAccount Instance to CR and find interesting items based on the + * OsAccount */ private final class OsAccountInstancesAddedTask implements Runnable { @@ -677,7 +678,7 @@ public final class CaseEventListener implements PropertyChangeListener { @Override public void run() { //Nothing to do here if the central repo is not enabled or if ingest is running but is set to not save data/make artifacts - if (!CentralRepository.isEnabled() + if (!CentralRepository.isEnabled() || (IngestManager.getInstance().isIngestRunning() && !(IngestEventsListener.isFlagSeenDevices() || IngestEventsListener.shouldCreateCrProperties()))) { return; } @@ -687,27 +688,15 @@ public final class CaseEventListener implements PropertyChangeListener { for (OsAccountInstance osAccountInstance : addedOsAccountNew) { try { OsAccount osAccount = osAccountInstance.getOsAccount(); - Optional accountAddr = osAccount.getAddr(); - // Check address if it is null or one of the ones below we want to ignore it since they will always be one a windows system - // and they are not unique - if (!accountAddr.isPresent() || accountAddr.get().equals("S-1-5-18") || accountAddr.get().equals("S-1-5-19") || accountAddr.get().equals("S-1-5-20")) { + CorrelationAttributeInstance 
correlationAttributeInstance = CorrelationAttributeUtil.makeCorrAttr(osAccount, osAccountInstance.getDataSource()); + if (correlationAttributeInstance == null) { return; } + + Optional accountAddr = osAccount.getAddr(); try { - - CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows()); - CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance( - CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID), - accountAddr.get(), - correlationCase, - CorrelationDataSource.fromTSKDataSource(correlationCase, osAccountInstance.getDataSource()), - "", - "", - TskData.FileKnown.KNOWN, - osAccount.getId()); - // Save to the database if requested - if(IngestEventsListener.shouldCreateCrProperties()) { + if (IngestEventsListener.shouldCreateCrProperties()) { dbManager.addArtifactInstance(correlationAttributeInstance); } @@ -740,14 +729,11 @@ public final class CaseEventListener implements PropertyChangeListener { } } - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS - } catch (NoCurrentCaseException ex) { - LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS } catch (CorrelationAttributeNormalizationException ex) { LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS } - } catch (TskCoreException ex) { LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex); } diff --git a/Core/src/org/sleuthkit/autopsy/commandlineingest/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/commandlineingest/Bundle.properties-MERGED index 447b4e3038..84598e3756 100755 --- 
a/Core/src/org/sleuthkit/autopsy/commandlineingest/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/commandlineingest/Bundle.properties-MERGED @@ -1,8 +1,9 @@ CommandLineIngestSettingPanel_empty_report_name_mgs=Report profile name was empty, no profile created. CommandLineIngestSettingPanel_existing_report_name_mgs=Report profile name was already exists, no profile created. +CommandLineIngestSettingPanel_invalid_report_name_mgs=Report profile name contained illegal characters, no profile created. CommandListIngestSettingsPanel_Default_Report_DisplayName=Default CommandListIngestSettingsPanel_Make_Config=Make new profile... -CommandListIngestSettingsPanel_Report_Name_Msg=Please supply a report profile name (commas not allowed): +CommandListIngestSettingsPanel_Report_Name_Msg=Please supply a report profile name (letters, digits, and underscore characters only): OpenIDE-Module-Name=CommandLineAutopsy OptionsCategory_Keywords_Command_Line_Ingest_Settings=Command Line Ingest Settings OptionsCategory_Keywords_General=Options diff --git a/Core/src/org/sleuthkit/autopsy/commandlineingest/CommandLineIngestSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/commandlineingest/CommandLineIngestSettingsPanel.java index ca11d03f13..531f96a878 100755 --- a/Core/src/org/sleuthkit/autopsy/commandlineingest/CommandLineIngestSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/commandlineingest/CommandLineIngestSettingsPanel.java @@ -280,18 +280,15 @@ public class CommandLineIngestSettingsPanel extends javax.swing.JPanel { add(nodePanel, java.awt.BorderLayout.CENTER); }// //GEN-END:initComponents @Messages({ - "CommandListIngestSettingsPanel_Report_Name_Msg=Please supply a report profile name (commas not allowed):", + "CommandListIngestSettingsPanel_Report_Name_Msg=Please supply a report profile name (letters, digits, and underscore characters only):", "CommandLineIngestSettingPanel_empty_report_name_mgs=Report profile name was empty, no profile created.", - 
"CommandLineIngestSettingPanel_existing_report_name_mgs=Report profile name was already exists, no profile created." + "CommandLineIngestSettingPanel_existing_report_name_mgs=Report profile name was already exists, no profile created.", + "CommandLineIngestSettingPanel_invalid_report_name_mgs=Report profile name contained illegal characters, no profile created." }) private void bnEditReportSettingsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnEditReportSettingsActionPerformed String reportName = getReportName(); if (reportName.equals(Bundle.CommandListIngestSettingsPanel_Make_Config())) { reportName = JOptionPane.showInputDialog(this, Bundle.CommandListIngestSettingsPanel_Report_Name_Msg()); - - // sanitize report name. Remove all commas because in CommandLineOptionProcessor we use commas - // to separate multiple report names - reportName = reportName.replaceAll(",", ""); // User hit cancel if (reportName == null) { @@ -302,6 +299,15 @@ public class CommandLineIngestSettingsPanel extends javax.swing.JPanel { } else if (doesReportProfileNameExist(reportName)) { JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), Bundle.CommandLineIngestSettingPanel_existing_report_name_mgs()); return; + } else { + // sanitize report name + String originalReportName = reportName; + reportName = reportName.replaceAll("[^A-Za-z0-9_]", ""); + if (reportName.isEmpty() || (!(originalReportName.equals(reportName)))) { + // report name contained only invalid characters, display error + JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), Bundle.CommandLineIngestSettingPanel_invalid_report_name_mgs()); + return; + } } } diff --git a/Core/src/org/sleuthkit/autopsy/communications/relationships/MediaViewer.java b/Core/src/org/sleuthkit/autopsy/communications/relationships/MediaViewer.java index 1473e7cd67..67a9ee5482 100755 --- a/Core/src/org/sleuthkit/autopsy/communications/relationships/MediaViewer.java +++ 
b/Core/src/org/sleuthkit/autopsy/communications/relationships/MediaViewer.java @@ -24,6 +24,7 @@ import java.awt.KeyboardFocusManager; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.logging.Level; @@ -38,6 +39,7 @@ import org.openide.nodes.AbstractNode; import org.openide.nodes.Node; import org.openide.util.Lookup; import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.communications.ModifiableProxyLookup; import org.sleuthkit.autopsy.corecomponents.TableFilterNode; import org.sleuthkit.autopsy.coreutils.Logger; @@ -46,8 +48,10 @@ import org.sleuthkit.autopsy.directorytree.DataResultFilterNode; import org.sleuthkit.datamodel.AbstractContent; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.BlackboardArtifact; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; +import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.SleuthkitCase; /** * A Panel that shows the media (thumbnails) for the selected account. 
@@ -65,6 +69,7 @@ final class MediaViewer extends JPanel implements RelationshipsViewer, ExplorerM private final MessageDataContent contentViewer; private MediaViewerWorker worker; + private SelectionWorker selectionWorker; @Messages({ "MediaViewer_Name=Media Attachments" @@ -106,11 +111,15 @@ final class MediaViewer extends JPanel implements RelationshipsViewer, ExplorerM @Override public void setSelectionInfo(SelectionInfo info) { contentViewer.setNode(null); - thumbnailViewer.resetComponent(); + thumbnailViewer.setNode(null); if (worker != null) { worker.cancel(true); } + + if(selectionWorker != null) { + selectionWorker.cancel(true); + } worker = new MediaViewerWorker(info); @@ -181,21 +190,66 @@ final class MediaViewer extends JPanel implements RelationshipsViewer, ExplorerM */ private void handleNodeSelectionChange() { final Node[] nodes = tableEM.getSelectedNodes(); + contentViewer.setNode(null); + + if(selectionWorker != null) { + selectionWorker.cancel(true); + } if (nodes != null && nodes.length == 1) { AbstractContent thumbnail = nodes[0].getLookup().lookup(AbstractContent.class); if (thumbnail != null) { - try { - Content parentContent = thumbnail.getParent(); - if (parentContent != null && parentContent instanceof BlackboardArtifact) { - contentViewer.setNode(new BlackboardArtifactNode((BlackboardArtifact) parentContent)); - } - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Unable to get parent Content from AbstraceContent instance.", ex); //NON-NLS + setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + selectionWorker = new SelectionWorker(thumbnail); + selectionWorker.execute(); + } + } + } + + /** + * A SwingWorker to get the artifact associated with the selected thumbnail. + */ + private class SelectionWorker extends SwingWorker { + + private final AbstractContent thumbnail; + + // Construct a SelectionWorker. 
+ SelectionWorker(AbstractContent thumbnail) { + this.thumbnail = thumbnail; + } + + @Override + protected BlackboardArtifact doInBackground() throws Exception { + SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase(); + List artifactsList = skCase.getBlackboardArtifacts(TSK_ASSOCIATED_OBJECT, thumbnail.getId()); + for (BlackboardArtifact contextArtifact : artifactsList) { + BlackboardAttribute associatedArtifactAttribute = contextArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT)); + if (associatedArtifactAttribute != null) { + long artifactId = associatedArtifactAttribute.getValueLong(); + return contextArtifact.getSleuthkitCase().getBlackboardArtifact(artifactId); } } - } else { - contentViewer.setNode(null); + return null; + } + + @Override + protected void done() { + if (isCancelled()) { + return; + } + + try { + BlackboardArtifact artifact = get(); + if (artifact != null) { + contentViewer.setNode(new BlackboardArtifactNode(artifact)); + } else { + contentViewer.setNode(null); + } + } catch (InterruptedException | ExecutionException ex) { + logger.log(Level.SEVERE, "Failed message viewer based on thumbnail selection. 
thumbnailID = " + thumbnail.getId(), ex); + } finally { + setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsContentViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsContentViewer.java index 634d5c61db..ef623f44cf 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsContentViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsContentViewer.java @@ -40,18 +40,17 @@ import org.sleuthkit.datamodel.TskCoreException; */ @ServiceProvider(service = DataContentViewer.class, position = 7) public class AnalysisResultsContentViewer implements DataContentViewer { + private static final Logger logger = Logger.getLogger(AnalysisResultsContentPanel.class.getName()); - + // isPreferred value private static final int PREFERRED_VALUE = 3; - + private final AnalysisResultsViewModel viewModel = new AnalysisResultsViewModel(); private final AnalysisResultsContentPanel panel = new AnalysisResultsContentPanel(); - + private SwingWorker worker = null; - - @NbBundle.Messages({ "AnalysisResultsContentViewer_title=Analysis Results" }) @@ -135,11 +134,11 @@ public class AnalysisResultsContentViewer implements DataContentViewer { if (content instanceof AnalysisResult) { return true; } - + if (content == null || content instanceof BlackboardArtifact) { continue; } - + try { if (Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().hasAnalysisResults(content.getId())) { return true; @@ -148,7 +147,7 @@ public class AnalysisResultsContentViewer implements DataContentViewer { logger.log(Level.SEVERE, "Unable to get analysis results for file with obj id " + content.getId(), ex); } } - + return false; } diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsViewModel.java 
b/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsViewModel.java index 00cc170b14..03720e0cbe 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsViewModel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/analysisresults/AnalysisResultsViewModel.java @@ -252,7 +252,7 @@ public class AnalysisResultsViewModel { try { nodeContent = Optional.of(content); - + // get the aggregate score of that content aggregateScore = Optional.ofNullable(content.getAggregateScore()); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java index 5df888052b..f5eed043fa 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2012-2019 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -110,7 +110,7 @@ public class ImageNode extends AbstractContentNode { actionsList.add(a); } actionsList.addAll(ExplorerNodeActionVisitor.getActions(content)); - actionsList.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text())); + actionsList.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text(), content.getId())); actionsList.add(new ViewSummaryInformationAction(content.getId())); actionsList.add(new RunIngestModulesAction(Collections.singletonList(content))); actionsList.add(new NewWindowViewAction(NbBundle.getMessage(this.getClass(), "ImageNode.getActions.viewInNewWin.text"), this)); diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/SpecialDirectoryNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/SpecialDirectoryNode.java index a3c6691ba2..244f9ece70 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/SpecialDirectoryNode.java +++ 
b/Core/src/org/sleuthkit/autopsy/datamodel/SpecialDirectoryNode.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017-2019 Basis Technology Corp. + * Copyright 2017-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -65,7 +65,7 @@ public abstract class SpecialDirectoryNode extends AbstractAbstractFileNodesingletonList(content))); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/AnalysisSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/AnalysisSummary.java old mode 100644 new mode 100755 index 5fff3eac6c..60598f7529 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/AnalysisSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/AnalysisSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,9 +18,7 @@ */ package org.sleuthkit.autopsy.datasourcesummary.datamodel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -31,30 +29,23 @@ import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.DataSource; -import 
org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** - * Providing data for the data source analysis tab. + * Helper class for getting hash set hits, keyword hits, and interesting item + * hits within a datasource. */ -public class AnalysisSummary implements DefaultArtifactUpdateGovernor { +public class AnalysisSummary { private static final BlackboardAttribute.Type TYPE_SET_NAME = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SET_NAME); - private static final Set EXCLUDED_KEYWORD_SEARCH_ITEMS = new HashSet<>(); - private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( - ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID(), - ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(), - ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID(), - ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() - )); - private final SleuthkitCaseProvider provider; /** @@ -73,11 +64,6 @@ public class AnalysisSummary implements DefaultArtifactUpdateGovernor { this.provider = provider; } - @Override - public Set getArtifactTypeIdsForRefresh() { - return ARTIFACT_UPDATE_TYPE_IDS; - } - /** * Gets counts for hashset hits. 
* diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/Bundle.properties-MERGED old mode 100644 new mode 100755 diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/Bundle_ja.properties old mode 100644 new mode 100755 diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/CityRecord.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/CityRecord.java index d2a1182e6a..26fa46498c 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/CityRecord.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/CityRecord.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ClosestCityMapper.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ClosestCityMapper.java index 4427d25f23..ecadc0931e 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ClosestCityMapper.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ClosestCityMapper.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -64,6 +64,7 @@ class ClosestCityMapper { * Retrieves singleton instance of this class. * * @return The singleton instance of this class. + * * @throws IOException */ static ClosestCityMapper getInstance() throws IOException { @@ -95,8 +96,9 @@ class ClosestCityMapper { * Main Constructor loading from an input stream. 
* * @param citiesInputStream The input stream for the csv text file - * containing the cities. - * @param logger The logger to be used with this. + * containing the cities. + * @param logger The logger to be used with this. + * * @throws IOException */ private ClosestCityMapper(InputStream citiesInputStream, java.util.logging.Logger logger) throws IOException { @@ -109,6 +111,7 @@ class ClosestCityMapper { * city can be determined. * * @param point The point to locate. + * * @return The closest city or null if no close city can be found. */ CityRecord findClosest(CityRecord point) { @@ -120,6 +123,7 @@ class ClosestCityMapper { * returned. * * @param s The string to parse. + * * @return The double value or null if value cannot be parsed. */ private Double tryParse(String s) { @@ -138,8 +142,9 @@ class ClosestCityMapper { * Parses a country name and transforms values like "last, first" to "first * last" (i.e. "Korea, South" becomes "South Korea"). * - * @param orig The original string value. + * @param orig The original string value. * @param lineNum The line number that this country was found. + * * @return The country name. */ private String parseCountryName(String orig, int lineNum) { @@ -159,9 +164,10 @@ class ClosestCityMapper { /** * Parses a row from the csv creating a city record. * - * @param csvRow The row of data where each item in the list is each column - * in the row. + * @param csvRow The row of data where each item in the list is each column + * in the row. * @param lineNum The line number for this csv row. + * * @return The parsed CityRecord or null if none can be determined. */ private CityRecord getCsvCityRecord(List csvRow, int lineNum) { @@ -199,8 +205,9 @@ class ClosestCityMapper { /** * Parses a row of the csv into individual column values. * - * @param line The line to parse. + * @param line The line to parse. * @param lineNum The line number in the csv where this line is. + * * @return The list of column values. 
*/ private List parseCsvLine(String line, int lineNum) { @@ -222,10 +229,12 @@ class ClosestCityMapper { * Parses all lines in the csv file input stream into a list of city * records. * - * @param csvInputStream The csv file input stream. + * @param csvInputStream The csv file input stream. * @param ignoreHeaderRow Whether or not there is a header row in the csv - * file. + * file. + * * @return The list of city records. + * * @throws IOException */ private List parseCsvLines(InputStream csvInputStream, boolean ignoreHeaderRow) throws IOException { diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ContainerSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ContainerSummary.java old mode 100644 new mode 100755 index 8129911500..4ecacdb059 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ContainerSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/ContainerSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,29 +18,25 @@ */ package org.sleuthkit.autopsy.datasourcesummary.datamodel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** * Provides methods to query for data source overview details. */ -public class ContainerSummary implements DefaultArtifactUpdateGovernor { - - private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( - BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_INFO.getTypeID(), - BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE.getTypeID() - )); +public class ContainerSummary { private final SleuthkitCaseProvider provider; @@ -59,22 +55,7 @@ public class ContainerSummary implements DefaultArtifactUpdateGovernor { public ContainerSummary(SleuthkitCaseProvider provider) { this.provider = provider; } - - @Override - public boolean isRefreshRequired(ModuleContentEvent evt) { - return true; - } - - @Override - public boolean isRefreshRequired(AbstractFile file) { - return true; - } - - @Override - public Set getArtifactTypeIdsForRefresh() { - return ARTIFACT_UPDATE_TYPE_IDS; - } - + /** * Gets the size of unallocated files in a particular datasource. 
* @@ -231,4 +212,228 @@ public class ContainerSummary implements DefaultArtifactUpdateGovernor { String separator = ", "; return getConcattedStringsResult(query, valueParam, separator); } + + /** + * Data model data for data source images. + */ + public static class ImageDetails { + + private final long unallocatedSize; + private final long size; + private final long sectorSize; + + private final String timeZone; + private final String imageType; + + private final List paths; + private final String md5Hash; + private final String sha1Hash; + private final String sha256Hash; + + /** + * Main constructor. + * + * @param unallocatedSize Size in bytes of unallocated space. + * @param size Total size in bytes. + * @param sectorSize Sector size in bytes. + * @param timeZone The time zone. + * @param imageType The type of image. + * @param paths The source paths for the image. + * @param md5Hash The md5 hash or null. + * @param sha1Hash The sha1 hash or null. + * @param sha256Hash The sha256 hash or null. + */ + ImageDetails(long unallocatedSize, long size, long sectorSize, + String timeZone, String imageType, List paths, String md5Hash, + String sha1Hash, String sha256Hash) { + this.unallocatedSize = unallocatedSize; + this.size = size; + this.sectorSize = sectorSize; + this.timeZone = timeZone; + this.imageType = imageType; + this.paths = paths == null ? Collections.emptyList() : new ArrayList<>(paths); + this.md5Hash = md5Hash; + this.sha1Hash = sha1Hash; + this.sha256Hash = sha256Hash; + } + + /** + * @return Size in bytes of unallocated space. + */ + public long getUnallocatedSize() { + return unallocatedSize; + } + + /** + * @return Total size in bytes. + */ + public long getSize() { + return size; + } + + /** + * @return Sector size in bytes. + */ + public long getSectorSize() { + return sectorSize; + } + + /** + * @return The time zone. + */ + public String getTimeZone() { + return timeZone; + } + + /** + * @return The type of image. 
+ */ + public String getImageType() { + return imageType; + } + + /** + * @return The source paths for the image. + */ + public List getPaths() { + return Collections.unmodifiableList(paths); + } + + /** + * @return The md5 hash or null. + */ + public String getMd5Hash() { + return md5Hash; + } + + /** + * @return The sha1 hash or null. + */ + public String getSha1Hash() { + return sha1Hash; + } + + /** + * @return The sha256 hash or null. + */ + public String getSha256Hash() { + return sha256Hash; + } + } + + /** + * Data model for container data. + */ + public static class ContainerDetails { + + private final String displayName; + private final String originalName; + private final String deviceIdValue; + private final String acquisitionDetails; + private final ImageDetails imageDetails; + + /** + * Main constructor. + * + * @param displayName The display name for this data source. + * @param originalName The original name for this data source. + * @param deviceIdValue The device id value for this data source. + * @param acquisitionDetails The acquisition details for this data + * source or null. + * @param imageDetails If the data source is an image, the image + * data model for this data source or null if + * non-image. + */ + ContainerDetails(String displayName, String originalName, String deviceIdValue, + String acquisitionDetails, ImageDetails imageDetails) { + this.displayName = displayName; + this.originalName = originalName; + this.deviceIdValue = deviceIdValue; + this.acquisitionDetails = acquisitionDetails; + this.imageDetails = imageDetails; + } + + /** + * @return The display name for this data source. + */ + public String getDisplayName() { + return displayName; + } + + /** + * @return The original name for this data source. + */ + public String getOriginalName() { + return originalName; + } + + /** + * @return The device id value for this data source. 
+ */ + public String getDeviceId() { + return deviceIdValue; + } + + /** + * @return The acquisition details for this data source or null. + */ + public String getAcquisitionDetails() { + return acquisitionDetails; + } + + /** + * @return If the data source is an image, the image details for this + * data source or null if non-image. + */ + public ImageDetails getImageDetails() { + return imageDetails; + } + } + + /** + * Generates a container data model object containing data about the data + * source. + * + * @param ds The data source. + * + * @return The generated view model. + */ + public ContainerDetails getContainerDetails(DataSource ds) throws TskCoreException, SQLException, SleuthkitCaseProvider.SleuthkitCaseProviderException { + if (ds == null) { + return null; + } + + return new ContainerDetails( + ds.getName(), + ds.getName(), + ds.getDeviceId(), + ds.getAcquisitionDetails(), + ds instanceof Image ? getImageDetails((Image) ds) : null + ); + } + + /** + * Generates an image data model object containing data about the image. + * + * @param image The image. + * + * @return The generated view model. + */ + public ImageDetails getImageDetails(Image image) throws TskCoreException, SQLException, SleuthkitCaseProvider.SleuthkitCaseProviderException { + if (image == null) { + return null; + } + + Long unallocSize = getSizeOfUnallocatedFiles(image); + String imageType = image.getType().getName(); + Long size = image.getSize(); + Long sectorSize = image.getSsize(); + String timeZone = image.getTimeZone(); + List paths = image.getPaths() == null ? 
Collections.emptyList() : Arrays.asList(image.getPaths()); + String md5 = image.getMd5(); + String sha1 = image.getSha1(); + String sha256 = image.getSha256(); + + return new ImageDetails(unallocSize, size, sectorSize, timeZone, imageType, paths, md5, sha1, sha256); + } } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DataFetcher.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataFetcher.java similarity index 96% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DataFetcher.java rename to Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataFetcher.java index 6eabe79634..45dcae5f35 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DataFetcher.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataFetcher.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.datasourcesummary.uiutils; +package org.sleuthkit.autopsy.datasourcesummary.datamodel; /** * A function that accepts input of type I and outputs type O. This function is diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataSourceInfoUtilities.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataSourceInfoUtilities.java index c711f3c9a0..becd78f418 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataSourceInfoUtilities.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/DataSourceInfoUtilities.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019 - 2020 Basis Technology Corp. + * Copyright 2019 - 2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.datasourcesummary.datamodel; import java.sql.ResultSet; import java.sql.SQLException; +import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Comparator; import java.util.Date; @@ -41,7 +42,10 @@ import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM; * Utilities for getting information about a data source or all data sources * from the case database. */ -final class DataSourceInfoUtilities { +public final class DataSourceInfoUtilities { + + public static final String COMMA_FORMAT_STR = "#,###"; + public static final DecimalFormat COMMA_FORMATTER = new DecimalFormat(COMMA_FORMAT_STR); /** * Gets a count of tsk_files for a particular datasource. @@ -100,7 +104,7 @@ final class DataSourceInfoUtilities { * @throws TskCoreException * @throws SQLException */ - static Long getCountOfRegNonSlackFiles(SleuthkitCase skCase, DataSource currentDataSource, String additionalWhere) + public static Long getCountOfRegNonSlackFiles(SleuthkitCase skCase, DataSource currentDataSource, String additionalWhere) throws TskCoreException, SQLException { String whereClause = "meta_type=" + TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue() + " AND type<>" + TSK_DB_FILES_TYPE_ENUM.SLACK.getFileType(); @@ -115,7 +119,7 @@ final class DataSourceInfoUtilities { /** * An interface for handling a result set and returning a value. */ - interface ResultSetHandler { + public interface ResultSetHandler { T process(ResultSet resultset) throws SQLException; } @@ -149,14 +153,14 @@ final class DataSourceInfoUtilities { * * @return The clause. */ - static String getMetaFlagsContainsStatement(TSK_FS_META_FLAG_ENUM flag) { + public static String getMetaFlagsContainsStatement(TSK_FS_META_FLAG_ENUM flag) { return "meta_flags & " + flag.getValue() + " > 0"; } /** * Enum for specifying the sort order for getAttributes. 
*/ - enum SortOrder { + public enum SortOrder { DESCENDING, ASCENDING } @@ -181,7 +185,7 @@ final class DataSourceInfoUtilities { * * @throws TskCoreException */ - static List getArtifacts(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType, SortOrder sortOrder) throws TskCoreException { + public static List getArtifacts(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType, SortOrder sortOrder) throws TskCoreException { return getArtifacts(skCase, artifactType, dataSource, attributeType, sortOrder, 0); } @@ -207,7 +211,7 @@ final class DataSourceInfoUtilities { * * @throws TskCoreException */ - static List getArtifacts(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType, SortOrder sortOrder, int maxCount) throws TskCoreException { + public static List getArtifacts(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType, SortOrder sortOrder, int maxCount) throws TskCoreException { if (maxCount < 0) { throw new IllegalArgumentException("Invalid maxCount passed to getArtifacts, value must be equal to or greater than 0"); } @@ -380,7 +384,7 @@ final class DataSourceInfoUtilities { * @return The 'getValueString()' value or null if the attribute or String * could not be retrieved. */ - static String getStringOrNull(BlackboardArtifact artifact, Type attributeType) { + public static String getStringOrNull(BlackboardArtifact artifact, Type attributeType) { BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType); return (attr == null) ? null : attr.getValueString(); } @@ -394,11 +398,11 @@ final class DataSourceInfoUtilities { * @return The 'getValueLong()' value or null if the attribute could not be * retrieved. 
*/ - static Long getLongOrNull(BlackboardArtifact artifact, Type attributeType) { + public static Long getLongOrNull(BlackboardArtifact artifact, Type attributeType) { BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType); return (attr == null) ? null : attr.getValueLong(); } - + /** * Retrieves the int value of a certain attribute type from an artifact. * @@ -408,7 +412,7 @@ final class DataSourceInfoUtilities { * @return The 'getValueInt()' value or null if the attribute could not be * retrieved. */ - static Integer getIntOrNull(BlackboardArtifact artifact, Type attributeType) { + public static Integer getIntOrNull(BlackboardArtifact artifact, Type attributeType) { BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType); return (attr == null) ? null : attr.getValueInt(); } @@ -423,8 +427,31 @@ final class DataSourceInfoUtilities { * @return The date determined from the 'getValueLong()' as seconds from * epoch or null if the attribute could not be retrieved or is 0. */ - static Date getDateOrNull(BlackboardArtifact artifact, Type attributeType) { + public static Date getDateOrNull(BlackboardArtifact artifact, Type attributeType) { Long longVal = getLongOrNull(artifact, attributeType); return (longVal == null || longVal == 0) ? null : new Date(longVal * 1000); } + + /** + * Returns the long value or zero if longVal is null. + * + * @param longVal The long value. + * + * @return The long value or 0 if provided value is null. + */ + public static long getLongOrZero(Long longVal) { + return longVal == null ? 0 : longVal; + } + + /** + * Returns string value of long with comma separators. If null returns a + * string of '0'. + * + * @param longVal The long value. + * + * @return The string value of the long. + */ + public static String getStringOrZero(Long longVal) { + return longVal == null ? 
"0" : COMMA_FORMATTER.format(longVal); + } } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/GeolocationSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/GeolocationSummary.java old mode 100644 new mode 100755 index d66bbaf30a..1b8f2feb8e --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/GeolocationSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/GeolocationSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -32,7 +32,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; import org.sleuthkit.autopsy.geolocation.AbstractWaypointFetcher; import org.sleuthkit.autopsy.geolocation.GeoFilter; import org.sleuthkit.autopsy.geolocation.MapWaypoint; @@ -45,7 +44,7 @@ import org.sleuthkit.datamodel.DataSource; /** * Gathers summary data about Geolocation information for a data source. */ -public class GeolocationSummary implements DefaultArtifactUpdateGovernor { +public class GeolocationSummary { /** * A count of hits for a particular city. @@ -59,8 +58,8 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * Main constructor. * * @param cityRecord The record for the city including name, country, - * and location. - * @param count The number of hits in proximity to that city. + * and location. + * @param count The number of hits in proximity to that city. 
*/ CityRecordCount(CityRecord cityRecord, int count) { this.cityRecord = cityRecord; @@ -69,7 +68,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { /** * @return The record for the city including name, country, and - * location. + * location. */ public CityRecord getCityRecord() { return cityRecord; @@ -96,8 +95,8 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { /** * Main constructor. * - * @param mostCommon The list of most common cities seen. - * @param mostRecent The list of most recent cities seen. + * @param mostCommon The list of most common cities seen. + * @param mostRecent The list of most recent cities seen. * @param mostRecentSeen */ CityData(CityCountsList mostCommon, CityCountsList mostRecent, Long mostRecentSeen) { @@ -122,7 +121,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { /** * @return The time stamp in seconds from epoch of the most recently - * seen city + * seen city */ public Long getMostRecentSeen() { return mostRecentSeen; @@ -142,10 +141,10 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { /** * Main constructor. * - * @param counts The list of cities and the count of how many points are - * closest to that city. + * @param counts The list of cities and the count of how many points + * are closest to that city. * @param otherCount The count of points where no closest city was - * determined due to not being close enough. + * determined due to not being close enough. */ CityCountsList(List counts, int otherCount) { this.counts = Collections.unmodifiableList(new ArrayList<>(counts)); @@ -154,7 +153,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { /** * @return The list of cities and the count of how many points are - * closest to that city. + * closest to that city. 
*/ public List getCounts() { return counts; @@ -162,7 +161,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { /** * @return The count of points where no closest city was determined due - * to not being close enough. + * to not being close enough. */ public int getOtherCount() { return otherCount; @@ -183,10 +182,10 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * Main constructor. * * @param mapWaypoints The way points found for the data source. - * @param tracks A list of sets where each set is a track in the data - * source. - * @param areas A list of areas where each set is an area in the data - * source. + * @param tracks A list of sets where each set is a track in the + * data source. + * @param areas A list of areas where each set is an area in the + * data source. */ private GeoResult(Set mapWaypoints, List> tracks, List> areas) { this.mapWaypoints = mapWaypoints; @@ -250,6 +249,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * A supplier method that can throw an exception of E. * * @return The object type. + * * @throws E The exception type. */ T get() throws E; @@ -277,13 +277,12 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { /** * @return Returns all the geolocation artifact types. */ - public List getGeoTypes() { + public static List getGeoTypes() { return GPS_ARTIFACT_TYPES; } - @Override - public Set getArtifactTypeIdsForRefresh() { - return GPS_ARTIFACT_TYPE_IDS; + public static Set getArtifactTypeIdsForRefresh() { + return Collections.unmodifiableSet(GPS_ARTIFACT_TYPE_IDS); } /** @@ -291,13 +290,14 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * the event where either time is null. * * @param minTime The minimum time. If null is provided, this function will - * return false. - * @param time The time to check. 
If null is provided and the min time is - * non-null, then this function will return false. + * return false. + * @param time The time to check. If null is provided and the min time is + * non-null, then this function will return false. + * * @return If minTime == null then false. If minTime != null && time == null - * then false. Otherwise time >= minTime. + * then false. Otherwise time >= minTime. */ - private boolean greaterThanOrEqual(Long minTime, Long time) { + private static boolean greaterThanOrEqual(Long minTime, Long time) { if (minTime != null && time != null && time >= minTime) { return true; } else { @@ -310,12 +310,13 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * a total of waypoints whose time stamp is greater than or equal to * minTime. * - * @param points The list of way points. + * @param points The list of way points. * @param minTime The minimum time for most recent points count. + * * @return A pair where the left value is the total count of way points and - * the right is the total list of way points that are >= minTime. + * the right is the total list of way points that are >= minTime. */ - private Pair getCounts(List points, Long minTime) { + private static Pair getCounts(List points, Long minTime) { if (points == null) { return EMPTY_COUNT; } @@ -332,7 +333,8 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * the point is null, null is returned. * * @param cityMapper The means of mapping a point to the closest city. - * @param pt The geolocation point. + * @param pt The geolocation point. + * * @return A tuple of the closest city and timestamp in seconds from epoch. */ private Pair getClosestWithTime(ClosestCityMapper cityMapper, MapWaypoint pt) { @@ -351,10 +353,12 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * into a stream of the unique cities identified in this grouping and the * latest time stamp for each grouping. 
* - * @param points The points in the grouping. + * @param points The points in the grouping. * @param cityMapper The means of mapping a point to the closest city. + * * @return A stream of tuples where each tuple will be a unique city (or - * null if a closest is not determined) and the latest timestamp for each. + * null if a closest is not determined) and the latest timestamp for + * each. */ private Stream> reduceGrouping(Set points, ClosestCityMapper cityMapper) { if (points == null) { @@ -367,7 +371,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { if (pair == null) { continue; } - + CityRecord city = pair.getLeft(); Long prevTime = timeMapping.get(city); Long curTime = pair.getRight(); @@ -375,7 +379,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { timeMapping.put(city, curTime); } } - + return timeMapping.entrySet().stream() .map(e -> Pair.of(e.getKey(), e.getValue())); } @@ -385,10 +389,12 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * of tuples where each tuple represents a point with the closest city and * the time stamp in seconds from epoch. * - * @param geoResult The result from the Geolocation API. + * @param geoResult The result from the Geolocation API. * @param cityMapper The means of mapping a point to the closest city. + * * @return A list of tuples where each tuple represents a point to be - * counted with a combination of the closest city and the timestamp. + * counted with a combination of the closest city and the timestamp. + * * @throws IOException */ private Stream> processGeoResult(GeoResult geoResult, ClosestCityMapper cityMapper) { @@ -398,7 +404,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { List> areas = (geoResult.getAreas() == null) ? Collections.emptyList() : geoResult.getAreas(); List> tracks = (geoResult.getTracks() == null) ? 
Collections.emptyList() : geoResult.getTracks(); - + Stream> reducedGroupings = Stream.of(areas, tracks) .flatMap((groupingList) -> groupingList.stream()) .flatMap((grouping) -> reduceGrouping(grouping, cityMapper)); @@ -407,7 +413,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { .flatMap((groupingList) -> groupingList.stream()) .flatMap((group) -> group.stream()) .collect(Collectors.toSet()); - + Set pointSet = geoResult.getMapWaypoints() == null ? Collections.emptySet() : geoResult.getMapWaypoints(); Stream> citiesForPoints = pointSet.stream() // it appears that AbstractWaypointFetcher.handleFilteredWaypointSet returns all points @@ -423,8 +429,8 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * number of found hits (i.e. most hits is first index). * * @param dataSource The data source. - * @param daysCount Number of days to go back. - * @param maxCount Maximum number of results. + * @param daysCount Number of days to go back. + * @param maxCount Maximum number of results. * * @return The sorted list. * @@ -507,11 +513,12 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * Main constructor. * * @param asyncResult Geolocation fetches results in a callback which is - * already handled by other mechanisms in data source summary. The - * BlockingQueue blocks until a result is received from geolocation. - * @param filters The applicable filters for geolocation. + * already handled by other mechanisms in data source + * summary. The BlockingQueue blocks until a result + * is received from geolocation. + * @param filters The applicable filters for geolocation. 
*/ - public PointFetcher(BlockingQueue asyncResult, GeoFilter filters) { + PointFetcher(BlockingQueue asyncResult, GeoFilter filters) { super(filters); this.asyncResult = asyncResult; } @@ -531,6 +538,7 @@ public class GeolocationSummary implements DefaultArtifactUpdateGovernor { * Fetches all GPS data for the data source from the current case. * * @param dataSource The data source. + * * @return The GPS data pertaining to the data source. * @throws SleuthkitCaseProviderException * @throws GeoLocationDataException diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/LatLngMap.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/LatLngMap.java index 01032f61da..22a004c03c 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/LatLngMap.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/LatLngMap.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -64,8 +64,8 @@ class LatLngMap { * Main contructor. * * @param pointsToAdd The points to be added to the data structure. - * @param bucketSize The size of a grid square in kilometers. So, if this - * value is 100, each sqaure will be a 100 x 100 km. + * @param bucketSize The size of a grid square in kilometers. So, if this + * value is 100, each sqaure will be a 100 x 100 km. */ LatLngMap(List pointsToAdd, double bucketSize) { this.bucketSize = bucketSize; @@ -86,6 +86,7 @@ class LatLngMap { * closest neighboring buckets. * * @param point The point to calculate the bucket location pair. + * * @return The pair that was determined. */ private Pair getBucketLocation(XYZPoint point) { @@ -106,6 +107,7 @@ class LatLngMap { * Finds closest point within (.5 * bucketSize) distance. * * @param point The point for which to find closest. + * * @return Returns the found point. 
*/ E findClosest(E point) { @@ -132,9 +134,10 @@ class LatLngMap { /** * Within the specific bucket, finds the closest point if any exists. * - * @param x The x axis bucket. - * @param y The y axis bucket. + * @param x The x axis bucket. + * @param y The y axis bucket. * @param point The point to search for. + * * @return The point, if any, that was found. */ private E findClosestInBucket(int x, int y, E point) { diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/MimeTypeSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/MimeTypeSummary.java old mode 100644 new mode 100755 index e753a44a76..4c5e0331eb --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/MimeTypeSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/MimeTypeSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,30 +18,22 @@ */ package org.sleuthkit.autopsy.datasourcesummary.datamodel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultUpdateGovernor; import java.sql.SQLException; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.TskCoreException; /** - * Provides methods to query for datasource files by mime type. + * Class to export summary information used by TypesPanel tab on the known files + * present in the specified DataSource. 
*/ -public class MimeTypeSummary implements DefaultUpdateGovernor { +public class MimeTypeSummary { private final SleuthkitCaseProvider provider; - private static final Set INGEST_JOB_EVENTS = new HashSet<>( - Arrays.asList(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED)); - /** * Main constructor. */ @@ -58,26 +50,6 @@ public class MimeTypeSummary implements DefaultUpdateGovernor { this.provider = provider; } - @Override - public boolean isRefreshRequired(ModuleContentEvent evt) { - return true; - } - - @Override - public boolean isRefreshRequired(AbstractFile file) { - return true; - } - - @Override - public boolean isRefreshRequired(IngestManager.IngestJobEvent evt) { - return (evt != null && INGEST_JOB_EVENTS.contains(evt)); - } - - @Override - public Set getIngestJobEventUpdates() { - return INGEST_JOB_EVENTS; - } - /** * Get the number of files in the case database for the current data source * which have the specified mimetypes. @@ -98,12 +70,7 @@ public class MimeTypeSummary implements DefaultUpdateGovernor { */ public Long getCountOfFilesForMimeTypes(DataSource currentDataSource, Set setOfMimeTypes) throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { - - return DataSourceInfoUtilities.getCountOfRegNonSlackFiles( - provider.get(), - currentDataSource, - "mime_type IN " + getSqlSet(setOfMimeTypes) - ); + return DataSourceInfoUtilities.getCountOfRegNonSlackFiles(provider.get(), currentDataSource, "mime_type IN " + getSqlSet(setOfMimeTypes)); } /** @@ -124,13 +91,9 @@ public class MimeTypeSummary implements DefaultUpdateGovernor { */ public Long getCountOfFilesNotInMimeTypes(DataSource currentDataSource, Set setOfMimeTypes) throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { - - return DataSourceInfoUtilities.getCountOfRegNonSlackFiles( - provider.get(), - currentDataSource, + return 
DataSourceInfoUtilities.getCountOfRegNonSlackFiles(provider.get(), currentDataSource, "mime_type NOT IN " + getSqlSet(setOfMimeTypes) - + " AND mime_type IS NOT NULL AND mime_type <> '' " - ); + + " AND mime_type IS NOT NULL AND mime_type <> '' "); } /** @@ -146,7 +109,6 @@ public class MimeTypeSummary implements DefaultUpdateGovernor { */ public Long getCountOfAllRegularFiles(DataSource dataSource) throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { - return DataSourceInfoUtilities.getCountOfRegNonSlackFiles(provider.get(), dataSource, null); } @@ -164,12 +126,7 @@ public class MimeTypeSummary implements DefaultUpdateGovernor { */ public Long getCountOfFilesWithNoMimeType(DataSource currentDataSource) throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { - - return DataSourceInfoUtilities.getCountOfRegNonSlackFiles( - provider.get(), - currentDataSource, - "(mime_type IS NULL OR mime_type = '') " - ); + return DataSourceInfoUtilities.getCountOfRegNonSlackFiles(provider.get(), currentDataSource, "(mime_type IS NULL OR mime_type = '') "); } /** diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/PastCasesSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/PastCasesSummary.java old mode 100644 new mode 100755 index ea6c089fca..66e265e128 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/PastCasesSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/PastCasesSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,7 +31,6 @@ import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; import org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleFactory; import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -62,7 +61,7 @@ import org.sleuthkit.datamodel.TskCoreException; * d) The content of that TSK_COMMENT attribute will be of the form "Previous * Case: case1,case2...caseN" */ -public class PastCasesSummary implements DefaultArtifactUpdateGovernor { +public class PastCasesSummary { /** * Return type for results items in the past cases tab. @@ -87,22 +86,17 @@ public class PastCasesSummary implements DefaultArtifactUpdateGovernor { * @return Data for the cases with same id table. */ public List> getSameIdsResults() { - return sameIdsResults; + return Collections.unmodifiableList(sameIdsResults); } /** * @return Data for the tagged notable table. 
*/ public List> getTaggedNotable() { - return taggedNotable; + return Collections.unmodifiableList(taggedNotable); } } - private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( - ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID(), - ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() - )); - private static final String CENTRAL_REPO_INGEST_NAME = CentralRepoIngestModuleFactory.getModuleName().toUpperCase().trim(); private static final BlackboardAttribute.Type TYPE_COMMENT = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_COMMENT); private static final BlackboardAttribute.Type TYPE_ASSOCIATED_ARTIFACT = new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT); @@ -147,11 +141,6 @@ public class PastCasesSummary implements DefaultArtifactUpdateGovernor { this.logger = logger; } - @Override - public Set getArtifactTypeIdsForRefresh() { - return ARTIFACT_UPDATE_TYPE_IDS; - } - /** * Given the provided sources for an attribute, aims to determine if one of * those sources is the Central Repository Ingest Module. @@ -224,7 +213,7 @@ public class PastCasesSummary implements DefaultArtifactUpdateGovernor { * @return The list of unique cases and their occurrences sorted from max to * min. */ - private List> getCaseCounts(Stream cases) { + private static List> getCaseCounts(Stream cases) { Collection> groupedCases = cases // group by case insensitive compare of cases .collect(Collectors.groupingBy((caseStr) -> caseStr.toUpperCase().trim())) diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/RecentFilesSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/RecentFilesSummary.java index 4f1a34fa73..1beb97766b 100755 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/RecentFilesSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/RecentFilesSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. 
+ * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,21 +18,18 @@ */ package org.sleuthkit.autopsy.datasourcesummary.datamodel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; import java.nio.file.Paths; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.Set; import java.util.stream.Collectors; import org.apache.commons.lang.StringUtils; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -40,13 +37,12 @@ import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; /** - * Helper class for getting data for the Recent Files Data Summary tab. + * Helper class for getting Recent Activity data. 
*/ -public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { +public class RecentFilesSummary { private final static BlackboardAttribute.Type DATETIME_ACCESSED_ATT = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED); private final static BlackboardAttribute.Type DOMAIN_ATT = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN); @@ -58,14 +54,6 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { private static final DateFormat DATETIME_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.getDefault()); - private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( - ARTIFACT_TYPE.TSK_RECENT_OBJECT.getTypeID(), - ARTIFACT_TYPE.TSK_WEB_DOWNLOAD.getTypeID(), - ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT.getTypeID(), - ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(), - ARTIFACT_TYPE.TSK_MESSAGE.getTypeID() - )); - private final SleuthkitCaseProvider provider; /** @@ -88,11 +76,6 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { this.provider = provider; } - @Override - public Set getArtifactTypeIdsForRefresh() { - return ARTIFACT_UPDATE_TYPE_IDS; - } - /** * Removes fileDetails entries with redundant paths, sorts by date * descending and limits to the limit provided. @@ -101,7 +84,7 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { * @param limit The maximum number of entries to return. * @return The sorted limited list with unique paths. */ - private List getSortedLimited(List fileDetails, int limit) { + private static List getSortedLimited(List fileDetails, int limit) { Map fileDetailsMap = fileDetails.stream() .filter(details -> details != null) .collect(Collectors.toMap( @@ -122,7 +105,7 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { * @param artifact The artifact. * @return The derived object or null if artifact is invalid. 
*/ - private RecentFileDetails getRecentlyOpenedDocument(BlackboardArtifact artifact) { + private static RecentFileDetails getRecentlyOpenedDocument(BlackboardArtifact artifact) { String path = DataSourceInfoUtilities.getStringOrNull(artifact, PATH_ATT); Long lastOpened = DataSourceInfoUtilities.getLongOrNull(artifact, DATETIME_ACCESSED_ATT); @@ -170,7 +153,7 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { * @param artifact The artifact. * @return The derived object or null if artifact is invalid. */ - private RecentDownloadDetails getRecentDownload(BlackboardArtifact artifact) { + private static RecentDownloadDetails getRecentDownload(BlackboardArtifact artifact) { Long accessedTime = DataSourceInfoUtilities.getLongOrNull(artifact, DATETIME_ACCESSED_ATT); String domain = DataSourceInfoUtilities.getStringOrNull(artifact, DOMAIN_ATT); String path = DataSourceInfoUtilities.getStringOrNull(artifact, PATH_ATT); @@ -187,7 +170,7 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { * * @param count The count. */ - private void throwOnNonPositiveCount(int count) { + private static void throwOnNonPositiveCount(int count) { if (count < 1) { throw new IllegalArgumentException("Invalid count: value must be greater than 0."); } @@ -268,7 +251,7 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { * @return The derived object or null. 
* @throws TskCoreException */ - private RecentAttachmentDetails getRecentAttachment(BlackboardArtifact artifact, SleuthkitCase skCase) throws TskCoreException { + private static RecentAttachmentDetails getRecentAttachment(BlackboardArtifact artifact, SleuthkitCase skCase) throws TskCoreException { // get associated artifact or return no result BlackboardAttribute attribute = artifact.getAttribute(ASSOCATED_ATT); if (attribute == null) { @@ -309,7 +292,7 @@ public class RecentFilesSummary implements DefaultArtifactUpdateGovernor { * * @return True if the given artifact is a message artifact */ - private boolean isMessageArtifact(BlackboardArtifact nodeArtifact) { + private static boolean isMessageArtifact(BlackboardArtifact nodeArtifact) { final int artifactTypeID = nodeArtifact.getArtifactTypeID(); return artifactTypeID == ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID() || artifactTypeID == ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/SleuthkitCaseProvider.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/SleuthkitCaseProvider.java old mode 100644 new mode 100755 diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineDataSourceUtils.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineDataSourceUtils.java old mode 100644 new mode 100755 index 40f76cce80..6528284911 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineDataSourceUtils.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineDataSourceUtils.java @@ -18,7 +18,6 @@ */ package org.sleuthkit.autopsy.datasourcesummary.datamodel; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineModule; import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState; @@ -60,10 +59,9 @@ public class TimelineDataSourceUtils { * @param 
dataSource The data source. * @return The root filter representing a default filter with only this data * source selected. - * @throws NoCurrentCaseException * @throws TskCoreException */ - public RootFilter getDataSourceFilter(DataSource dataSource) throws NoCurrentCaseException, TskCoreException { + public RootFilter getDataSourceFilter(DataSource dataSource) throws TskCoreException { RootFilterState filterState = getDataSourceFilterState(dataSource); return filterState == null ? null : filterState.getActiveFilter(); } @@ -75,10 +73,9 @@ public class TimelineDataSourceUtils { * @param dataSource The data source. * @return The root filter state representing a default filter with only * this data source selected. - * @throws NoCurrentCaseException * @throws TskCoreException */ - public RootFilterState getDataSourceFilterState(DataSource dataSource) throws NoCurrentCaseException, TskCoreException { + public RootFilterState getDataSourceFilterState(DataSource dataSource) throws TskCoreException { TimeLineController controller = TimeLineModule.getController(); RootFilterState dataSourceState = controller.getEventsModel().getDefaultEventFilterState().copyOf(); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineSummary.java old mode 100644 new mode 100755 index a43d46764b..28c616a89f --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TimelineSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +18,8 @@ */ package org.sleuthkit.autopsy.datasourcesummary.datamodel; +import java.text.DateFormat; +import java.text.SimpleDateFormat; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; @@ -26,29 +28,25 @@ import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TimeZone; import org.joda.time.Interval; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultUpdateGovernor; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.TimelineEvent; import org.sleuthkit.datamodel.TimelineEventType; import org.sleuthkit.datamodel.TimelineFilter.RootFilter; import org.sleuthkit.datamodel.TimelineManager; import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; import java.util.function.Supplier; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.core.UserPreferences; /** * Provides data source summary information pertaining to Timeline data. */ -public class TimelineSummary implements DefaultUpdateGovernor { +public class TimelineSummary { /** * A function for obtaining a Timeline RootFilter filtered to the specific @@ -61,16 +59,13 @@ public class TimelineSummary implements DefaultUpdateGovernor { * * @param dataSource The data source. * @return The timeline root filter. 
- * @throws NoCurrentCaseException + * @throws SleuthkitCaseProviderException * @throws TskCoreException */ - RootFilter apply(DataSource dataSource) throws NoCurrentCaseException, TskCoreException; + RootFilter apply(DataSource dataSource) throws SleuthkitCaseProviderException, TskCoreException; } private static final long DAY_SECS = 24 * 60 * 60; - private static final Set INGEST_JOB_EVENTS = new HashSet<>( - Arrays.asList(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED)); - private static final Set FILE_SYSTEM_EVENTS = new HashSet<>(Arrays.asList( TimelineEventType.FILE_MODIFIED, @@ -105,39 +100,19 @@ public class TimelineSummary implements DefaultUpdateGovernor { this.filterFunction = filterFunction; } - @Override - public boolean isRefreshRequired(ModuleContentEvent evt) { - return true; - } - - @Override - public boolean isRefreshRequired(AbstractFile file) { - return true; - } - - @Override - public boolean isRefreshRequired(IngestManager.IngestJobEvent evt) { - return (evt != null && INGEST_JOB_EVENTS.contains(evt)); - } - - @Override - public Set getIngestJobEventUpdates() { - return INGEST_JOB_EVENTS; - } - /** * Retrieves timeline summary data. * - * @param dataSource The data source for which timeline data will be - * retrieved. + * @param dataSource The data source for which timeline data will be + * retrieved. * @param recentDaysNum The maximum number of most recent days' activity to - * include. + * include. + * * @return The retrieved data. 
* @throws SleuthkitCaseProviderException * @throws TskCoreException - * @throws NoCurrentCaseException */ - public TimelineSummaryData getData(DataSource dataSource, int recentDaysNum) throws SleuthkitCaseProviderException, TskCoreException, NoCurrentCaseException { + public TimelineSummaryData getTimelineSummaryData(DataSource dataSource, int recentDaysNum) throws SleuthkitCaseProviderException, TskCoreException { TimeZone timeZone = this.timeZoneProvider.get(); TimelineManager timelineManager = this.caseProvider.get().getTimelineManager(); @@ -174,10 +149,11 @@ public class TimelineSummary implements DefaultUpdateGovernor { * Given activity by day, converts to most recent days' activity handling * empty values. * - * @param dateCounts The day from epoch mapped to activity amounts for that - * day. + * @param dateCounts The day from epoch mapped to activity amounts for + * that day. * @param minRecentDay The minimum recent day in days from epoch. - * @param maxDay The maximum recent day in days from epoch; + * @param maxDay The maximum recent day in days from epoch; + * * @return The most recent daily activity amounts. */ private List getMostRecentActivityAmounts(Map dateCounts, long minRecentDay, long maxDay) { @@ -197,17 +173,18 @@ public class TimelineSummary implements DefaultUpdateGovernor { /** * Fetches timeline events per day for a particular data source. * - * @param dataSource The data source. + * @param dataSource The data source. * @param timelineManager The timeline manager to use while fetching the - * data. - * @param timeZone The time zone to use to determine which day activity - * belongs. + * data. + * @param timeZone The time zone to use to determine which day + * activity belongs. + * * @return A Map mapping days from epoch to the activity for that day. 
+ * * @throws TskCoreException - * @throws NoCurrentCaseException */ private Map getTimelineEventsByDay(DataSource dataSource, TimelineManager timelineManager, TimeZone timeZone) - throws TskCoreException, NoCurrentCaseException { + throws TskCoreException, SleuthkitCaseProviderException { RootFilter rootFilter = this.filterFunction.apply(dataSource); // get events for data source @@ -251,12 +228,14 @@ public class TimelineSummary implements DefaultUpdateGovernor { /** * Main constructor. * - * @param minDate Earliest usage date recorded for the data source. - * @param maxDate Latest usage date recorded for the data source. + * @param minDate Earliest usage date recorded for the data + * source. + * @param maxDate Latest usage date recorded for the data + * source. * @param recentDaysActivity A list of activity prior to and including - * max date sorted by min to max date. - * @param dataSource The data source for which this data applies. the - * latest usage date by day. + * max date sorted by min to max date. + * @param dataSource The data source for which this data + * applies. the latest usage date by day. */ TimelineSummaryData(Date minDate, Date maxDate, List recentDaysActivity, DataSource dataSource) { this.minDate = minDate; @@ -281,7 +260,7 @@ public class TimelineSummary implements DefaultUpdateGovernor { /** * @return A list of activity prior to and including the latest usage - * date by day sorted min to max date. + * date by day sorted min to max date. */ public List getMostRecentDaysActivity() { return histogramActivity; @@ -307,8 +286,10 @@ public class TimelineSummary implements DefaultUpdateGovernor { /** * Main constructor. * - * @param day The day for which activity is being measured. - * @param fileActivityCount The amount of file activity timeline events. + * @param day The day for which activity is being + * measured. + * @param fileActivityCount The amount of file activity timeline + * events. 
* @param artifactActivityCount The amount of artifact timeline events. */ DailyActivityAmount(Date day, long fileActivityCount, long artifactActivityCount) { @@ -337,6 +318,29 @@ public class TimelineSummary implements DefaultUpdateGovernor { public long getArtifactActivityCount() { return artifactActivityCount; } - } + + /** + * Creates a DateFormat formatter that uses UTC for time zone. + * + * @param formatString The date format string. + * @return The data format. + */ + public static DateFormat getUtcFormat(String formatString) { + return new SimpleDateFormat(formatString, Locale.getDefault()); + } + + /** + * Formats a date using a DateFormat. In the event that the date is null, + * returns a null string. + * + * @param date The date to format. + * @param formatter The DateFormat to use to format the date. + * + * @return The formatted string generated from the formatter or null if the + * date is null. + */ + public static String formatDate(Date date, DateFormat formatter) { + return date == null ? null : formatter.format(date); + } } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TypesSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TypesSummary.java old mode 100644 new mode 100755 index ff4bcae0a0..b13e852b95 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TypesSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TypesSummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019 - 2020 Basis Technology Corp. + * Copyright 2019 - 2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,26 +18,19 @@ */ package org.sleuthkit.autopsy.datasourcesummary.datamodel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultUpdateGovernor; +import java.awt.Color; import java.sql.SQLException; -import java.util.Arrays; -import java.util.HashSet; import java.util.Set; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.autopsy.coreutils.FileTypeUtils; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** - * Provides information for the DataSourceSummaryCountsPanel. + * Helper class for getting summary information on the known files present in the + * specified DataSource.. */ -public class TypesSummary implements DefaultUpdateGovernor { - - private static final Set INGEST_JOB_EVENTS = new HashSet<>( - Arrays.asList(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED)); +public class TypesSummary { private final SleuthkitCaseProvider provider; @@ -57,25 +50,6 @@ public class TypesSummary implements DefaultUpdateGovernor { this.provider = provider; } - @Override - public boolean isRefreshRequired(ModuleContentEvent evt) { - return true; - } - - @Override - public boolean isRefreshRequired(AbstractFile file) { - return true; - } - - @Override - public boolean isRefreshRequired(IngestManager.IngestJobEvent evt) { - return (evt != null && INGEST_JOB_EVENTS.contains(evt)); - } - - @Override - public Set getIngestJobEventUpdates() { - return INGEST_JOB_EVENTS; - } /** * Get count of regular files (not directories) in a data source. 
@@ -169,4 +143,59 @@ public class TypesSummary implements DefaultUpdateGovernor { return DataSourceInfoUtilities.getCountOfRegularFiles(provider.get(), currentDataSource, "type=" + TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.getFileType()); } + + /** + * Information concerning a particular file type category. + */ + public static class FileTypeCategoryData { + + private final String label; + private final Set mimeTypes; + private final Color color; + + /** + * Main constructor. + * + * @param label The label for this slice. + * @param mimeTypes The mime types associated with this slice. + * @param color The color associated with this slice. + */ + public FileTypeCategoryData(String label, Set mimeTypes, Color color) { + this.label = label; + this.mimeTypes = mimeTypes; + this.color = color; + } + + /** + * Constructor that accepts FileTypeCategory. + * + * @param label The label for this slice. + * @param mimeTypes The mime types associated with this slice. + * @param color The color associated with this slice. + */ + public FileTypeCategoryData(String label, FileTypeUtils.FileTypeCategory fileCategory, Color color) { + this(label, fileCategory.getMediaTypes(), color); + } + + /** + * @return The label for this category. + */ + public String getLabel() { + return label; + } + + /** + * @return The mime types associated with this category. + */ + public Set getMimeTypes() { + return mimeTypes; + } + + /** + * @return The color associated with this category. 
+ */ + public Color getColor() { + return color; + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/UserActivitySummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/UserActivitySummary.java old mode 100644 new mode 100755 index 10f700d51f..a32867ffce --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/UserActivitySummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/UserActivitySummary.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,7 +19,6 @@ package org.sleuthkit.autopsy.datasourcesummary.datamodel; import java.io.File; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -54,7 +53,7 @@ import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; * time, the data being provided for domains is fictitious and is done as a * placeholder. */ -public class UserActivitySummary implements DefaultArtifactUpdateGovernor { +public class UserActivitySummary { /** * Functions that determine the folder name of a list of path elements. If @@ -138,16 +137,6 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { .compareToIgnoreCase((b.getProgramName() == null ? 
"" : b.getProgramName())); }; - private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( - ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY.getTypeID(), - ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(), - ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(), - ARTIFACT_TYPE.TSK_CALLLOG.getTypeID(), - ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID(), - ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID(), - ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID() - )); - private static final Set DEVICE_EXCLUDE_LIST = new HashSet<>(Arrays.asList("ROOT_HUB", "ROOT_HUB20")); private static final Set DOMAIN_EXCLUDE_LIST = new HashSet<>(Arrays.asList("127.0.0.1", "LOCALHOST")); @@ -186,27 +175,55 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { this.logger = logger; } - @Override - public Set getArtifactTypeIdsForRefresh() { - return ARTIFACT_UPDATE_TYPE_IDS; - } - /** * Throws an IllegalArgumentException if count <= 0. * * @param count The count being checked. */ - private void assertValidCount(int count) { + private static void assertValidCount(int count) { if (count <= 0) { throw new IllegalArgumentException("Count must be greater than 0"); } } + + /** + * Determines a short folder name if any. Otherwise, returns empty string. + * + * @param strPath The string path. + * @param applicationName The application name. + * + * @return The short folder name or empty string if not found. 
+ */ + public static String getShortFolderName(String strPath, String applicationName) { + if (strPath == null) { + return ""; + } + + List pathEls = new ArrayList<>(Arrays.asList(applicationName)); + + File file = new File(strPath); + while (file != null && org.apache.commons.lang.StringUtils.isNotBlank(file.getName())) { + pathEls.add(file.getName()); + file = file.getParentFile(); + } + + Collections.reverse(pathEls); + + for (Function, String> matchEntry : SHORT_FOLDER_MATCHERS) { + String result = matchEntry.apply(pathEls); + if (org.apache.commons.lang.StringUtils.isNotBlank(result)) { + return result; + } + } + + return ""; + } /** * Gets a list of recent domains based on the datasource. * * @param dataSource The datasource to query for recent domains. - * @param count The max count of items to return. + * @param count The max count of items to return. * * @return The list of items retrieved from the database. * @@ -242,13 +259,13 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * Creates a TopDomainsResult from data or null if no visit date exists * within DOMAIN_WINDOW_MS of mostRecentMs. * - * @param domain The domain. - * @param visits The list of the artifact and its associated time in - * milliseconds. + * @param domain The domain. + * @param visits The list of the artifact and its associated time in + * milliseconds. * @param mostRecentMs The most recent visit of any domain. * * @return The TopDomainsResult or null if no visits to this domain within - * 30 days of mostRecentMs. + * 30 days of mostRecentMs. */ private TopDomainsResult getDomainsResult(String domain, List> visits, long mostRecentMs) { long visitCount = 0; @@ -288,9 +305,9 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * @param dataSource The datasource. 
* * @return A tuple where the first value is the latest web history accessed - * date in milliseconds and the second value maps normalized (lowercase; - * trimmed) domain names to when those domains were visited and the relevant - * artifact. + * date in milliseconds and the second value maps normalized + * (lowercase; trimmed) domain names to when those domains were + * visited and the relevant artifact. * * @throws TskCoreException * @throws SleuthkitCaseProviderException @@ -357,7 +374,7 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * @param artifact The artifact. * * @return The TopWebSearchResult or null if the search string or date - * accessed cannot be determined. + * accessed cannot be determined. */ private static TopWebSearchResult getWebSearchResult(BlackboardArtifact artifact) { String searchString = DataSourceInfoUtilities.getStringOrNull(artifact, TYPE_TEXT); @@ -372,10 +389,11 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * term. * * @param dataSource The data source. - * @param count The maximum number of records to be shown (must be > 0). + * @param count The maximum number of records to be shown (must be > + * 0). * * @return The list of most recent web searches where most recent search - * appears first. + * appears first. * * @throws * org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException @@ -462,6 +480,7 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * * @param r1 A result. * @param r2 Another result. + * * @return The most recent one with a non-null date. */ private TopDeviceAttachedResult getMostRecentDevice(TopDeviceAttachedResult r1, TopDeviceAttachedResult r2) { @@ -480,10 +499,11 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * Retrieves most recent devices used by most recent date attached. * * @param dataSource The data source. 
- * @param count The maximum number of records to be shown (must be > 0). + * @param count The maximum number of records to be shown (must be > + * 0). * * @return The list of most recent devices attached where most recent device - * attached appears first. + * attached appears first. * * @throws * org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException @@ -528,7 +548,7 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * @param artifact The artifact. * * @return The TopAccountResult or null if the account type or message date - * cannot be determined. + * cannot be determined. */ private static TopAccountResult getMessageAccountResult(BlackboardArtifact artifact) { String type = DataSourceInfoUtilities.getStringOrNull(artifact, TYPE_MESSAGE_TYPE); @@ -542,12 +562,12 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * Obtains a TopAccountResult from a blackboard artifact. The date is * maximum of any found dates for attribute types provided. * - * @param artifact The artifact. + * @param artifact The artifact. * @param messageType The type of message this is. - * @param dateAttrs The date attribute types. + * @param dateAttrs The date attribute types. * * @return The TopAccountResult or null if the account type or max date are - * not provided. + * not provided. */ private static TopAccountResult getAccountResult(BlackboardArtifact artifact, String messageType, BlackboardAttribute.Type... dateAttrs) { String type = messageType; @@ -638,39 +658,6 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { .collect(Collectors.toList()); } - /** - * Determines a short folder name if any. Otherwise, returns empty string. - * - * @param strPath The string path. - * @param applicationName The application name. - * - * @return The short folder name or empty string if not found. 
- */ - public String getShortFolderName(String strPath, String applicationName) { - if (strPath == null) { - return ""; - } - - List pathEls = new ArrayList<>(Arrays.asList(applicationName)); - - File file = new File(strPath); - while (file != null && org.apache.commons.lang.StringUtils.isNotBlank(file.getName())) { - pathEls.add(file.getName()); - file = file.getParentFile(); - } - - Collections.reverse(pathEls); - - for (Function, String> matchEntry : SHORT_FOLDER_MATCHERS) { - String result = matchEntry.apply(pathEls); - if (org.apache.commons.lang.StringUtils.isNotBlank(result)) { - return result; - } - } - - return ""; - } - /** * Creates a TopProgramsResult from a TSK_PROG_RUN blackboard artifact. * @@ -764,12 +751,12 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * be ignored and all items will be returned. * * @param dataSource The datasource. If the datasource is null, an empty - * list will be returned. - * @param count The number of results to return. This value must be > 0 or - * an IllegalArgumentException will be thrown. + * list will be returned. + * @param count The number of results to return. This value must be > 0 + * or an IllegalArgumentException will be thrown. * * @return The sorted list and limited to the count if last run or run count - * information is available on any item. + * information is available on any item. * * @throws SleuthkitCaseProviderException * @throws TskCoreException @@ -840,7 +827,7 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * Main constructor. * * @param lastAccessed The date of last access. - * @param artifact The relevant blackboard artifact. + * @param artifact The relevant blackboard artifact. */ public LastAccessedArtifact(Date lastAccessed, BlackboardArtifact artifact) { this.lastAccessed = lastAccessed; @@ -875,7 +862,7 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * * @param searchString The search string. 
* @param dateAccessed The latest date searched. - * @param artifact The relevant blackboard artifact. + * @param artifact The relevant blackboard artifact. */ public TopWebSearchResult(String searchString, Date dateAccessed, BlackboardArtifact artifact) { super(dateAccessed, artifact); @@ -918,11 +905,11 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { /** * Main constructor. * - * @param deviceId The device id. + * @param deviceId The device id. * @param dateAccessed The date last attached. - * @param deviceMake The device make. - * @param deviceModel The device model. - * @param artifact The relevant blackboard artifact. + * @param deviceMake The device make. + * @param deviceModel The device model. + * @param artifact The relevant blackboard artifact. */ public TopDeviceAttachedResult(String deviceId, Date dateAccessed, String deviceMake, String deviceModel, BlackboardArtifact artifact) { super(dateAccessed, artifact); @@ -965,8 +952,8 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * Main constructor. * * @param accountType The account type. - * @param lastAccess The date the account was last accessed. - * @param artifact The artifact indicating last access. + * @param lastAccess The date the account was last accessed. + * @param artifact The artifact indicating last access. */ public TopAccountResult(String accountType, Date lastAccess, BlackboardArtifact artifact) { super(lastAccess, artifact); @@ -992,10 +979,10 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { /** * Describes a top domain result. * - * @param domain The domain. + * @param domain The domain. * @param visitTimes The number of times it was visited. - * @param lastVisit The date of the last visit. - * @param artifact The relevant blackboard artifact. + * @param lastVisit The date of the last visit. + * @param artifact The relevant blackboard artifact. 
*/ public TopDomainsResult(String domain, Long visitTimes, Date lastVisit, BlackboardArtifact artifact) { super(lastVisit, artifact); @@ -1032,8 +1019,8 @@ public class UserActivitySummary implements DefaultArtifactUpdateGovernor { * * @param programName The name of the program. * @param programPath The path of the program. - * @param runTimes The number of runs. - * @param artifact The relevant blackboard artifact. + * @param runTimes The number of runs. + * @param artifact The relevant blackboard artifact. */ TopProgramsResult(String programName, String programPath, Long runTimes, Date lastRun, BlackboardArtifact artifact) { super(lastRun, artifact); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.java index 736dce7f3c..c8b708d371 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -21,16 +21,12 @@ package org.sleuthkit.autopsy.datasourcesummary.ui; import java.util.Arrays; import java.util.List; import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.AnalysisSummary; import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel; import org.sleuthkit.datamodel.DataSource; @@ -101,10 +97,10 @@ public class AnalysisPanel extends BaseDataSourceSummaryPanel { * Creates a new DataSourceUserActivityPanel. 
*/ public AnalysisPanel() { - this(new AnalysisSummary()); + this(new AnalysisSummaryGetter()); } - public AnalysisPanel(AnalysisSummary analysisData) { + public AnalysisPanel(AnalysisSummaryGetter analysisData) { super(analysisData); hashsetsFetcher = (dataSource) -> analysisData.getHashsetCounts(dataSource); @@ -229,17 +225,6 @@ public class AnalysisPanel extends BaseDataSourceSummaryPanel { ); }// //GEN-END:initComponents - @Override - List getExports(DataSource dataSource) { - return Stream.of( - getTableExport(hashsetsFetcher, DEFAULT_COLUMNS, Bundle.AnalysisPanel_hashsetHits_tabName(), dataSource), - getTableExport(keywordsFetcher, DEFAULT_COLUMNS, Bundle.AnalysisPanel_keywordHits_tabName(), dataSource), - getTableExport(interestingItemsFetcher, DEFAULT_COLUMNS, Bundle.AnalysisPanel_interestingItemHits_tabName(), dataSource)) - .filter(sheet -> sheet != null) - .collect(Collectors.toList()); - } - - // Variables declaration - do not modify//GEN-BEGIN:variables // End of variables declaration//GEN-END:variables } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisSummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisSummaryGetter.java new file mode 100644 index 0000000000..135a5ef757 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/AnalysisSummaryGetter.java @@ -0,0 +1,105 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020-2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import org.apache.commons.lang3.tuple.Pair; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.AnalysisSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Wrapper class for converting + * org.sleuthkit.autopsy.contentutils.AnalysisSummary functionality into a + * DefaultArtifactUpdateGovernor used by data source analysis tab. + */ +public class AnalysisSummaryGetter implements DefaultArtifactUpdateGovernor { + + private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( + ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID(), + ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(), + ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID(), + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + )); + + private final AnalysisSummary analysisSummary; + + /** + * Main constructor. + */ + public AnalysisSummaryGetter() { + analysisSummary = new AnalysisSummary(); + } + + @Override + public Set getArtifactTypeIdsForRefresh() { + return Collections.unmodifiableSet(ARTIFACT_UPDATE_TYPE_IDS); + } + + /** + * Gets counts for hashset hits. + * + * @param dataSource The datasource for which to identify hashset hits. + * + * @return The hashset set name with the number of hits in descending order. 
+ * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List> getHashsetCounts(DataSource dataSource) throws SleuthkitCaseProviderException, TskCoreException { + return analysisSummary.getHashsetCounts(dataSource); + } + + /** + * Gets counts for keyword hits. + * + * @param dataSource The datasource for which to identify keyword hits. + * + * @return The keyword set name with the number of hits in descending order. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List> getKeywordCounts(DataSource dataSource) throws SleuthkitCaseProviderException, TskCoreException { + return analysisSummary.getKeywordCounts(dataSource); + } + + /** + * Gets counts for interesting item hits. + * + * @param dataSource The datasource for which to identify interesting item + * hits. + * + * @return The interesting item set name with the number of hits in + * descending order. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List> getInterestingItemCounts(DataSource dataSource) throws SleuthkitCaseProviderException, TskCoreException { + return analysisSummary.getInterestingItemCounts(dataSource); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/BaseDataSourceSummaryPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/BaseDataSourceSummaryPanel.java index 01773e5626..1cf103052a 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/BaseDataSourceSummaryPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/BaseDataSourceSummaryPanel.java @@ -38,16 +38,11 @@ import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult; import 
org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import org.sleuthkit.autopsy.datasourcesummary.uiutils.EventUpdateHandler; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelExportException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelTableExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelCellModel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.GuiCellModel.DefaultMenuItem; import org.sleuthkit.autopsy.datasourcesummary.uiutils.GuiCellModel.MenuItem; import org.sleuthkit.autopsy.datasourcesummary.uiutils.LoadableComponent; @@ -453,14 +448,6 @@ abstract class BaseDataSourceSummaryPanel extends JPanel { */ protected abstract void onNewDataSource(DataSource dataSource); - /** - * Returns all the excel exportable items associated with the tab. - * - * @param dataSource The data source that results should be filtered. - * @return The excel exportable objects. - */ - abstract List getExports(DataSource dataSource); - /** * Runs a data fetcher and returns the result handling any possible errors * with a log message. @@ -485,100 +472,6 @@ abstract class BaseDataSourceSummaryPanel extends JPanel { } } - /** - * Function that converts data into a excel sheet data. - */ - protected interface ExcelExportFunction { - - /** - * Function that converts data into an excel sheet. - * - * @param data The data. - * @return The excel sheet export. - * @throws ExcelExportException - */ - ExcelSheetExport convert(T data) throws ExcelExportException; - } - - /** - * Helper method that converts data into an excel sheet export handling - * possible excel exceptions. 
- * - * @param excelConverter Function to convert data to an excel sheet export. - * @param data The data. If data is null, null will be returned. - * @param sheetName The name(s) of the sheet (to be used in the error - * message). - * @return The excel sheet export. - */ - protected static ExcelSheetExport convertToExcel(ExcelExportFunction excelConverter, T data, String sheetName) { - if (data == null) { - return null; - } - - try { - return excelConverter.convert(data); - } catch (ExcelExportException ex) { - logger.log(Level.WARNING, - String.format("There was an error while preparing export of worksheet(s): '%s'", - sheetName == null ? "" : sheetName), ex); - return null; - } - } - - /** - * Returns an excel sheet export given the fetching of data or null if no - * export created. - * - * @param dataFetcher The means of fetching data. - * @param excelConverter The means of converting data to excel. - * @param sheetName The name of the sheet (for error handling reporting). - * @param ds The data source to use for fetching data. - * @return The excel sheet export or null if no export could be generated. - */ - protected static ExcelSheetExport getExport( - DataFetcher dataFetcher, ExcelExportFunction excelConverter, - String sheetName, DataSource ds) { - - T data = getFetchResult(dataFetcher, sheetName, ds); - return convertToExcel(excelConverter, data, sheetName); - } - - /** - * Returns an excel table export of the data or null if no export created. - * - * @param columnsModel The model for the columns. - * @param sheetName The name for the sheet. - * @param data The data to be exported. - * @return The excel table export or null if no export could be generated. 
- */ - protected static ExcelSheetExport getTableExport(List> columnsModel, - String sheetName, List data) { - - return convertToExcel((dataList) -> new ExcelTableExport(sheetName, columnsModel, dataList), - data, - sheetName); - } - - /** - * Returns an excel table export of the data or null if no export created. - * - * @param dataFetcher The means of fetching data for the data source and the - * export. - * @param columnsModel The model for the columns. - * @param sheetName The name for the sheet. - * @param ds The data source. - * @return The excel export or null if no export created. - */ - protected static ExcelSheetExport getTableExport( - DataFetcher> dataFetcher, List> columnsModel, - String sheetName, DataSource ds) { - - return getExport(dataFetcher, - (dataList) -> new ExcelTableExport(sheetName, columnsModel, dataList), - sheetName, - ds); - } - /** * Utility method that shows a loading screen with loadable components, * create swing workers from the datafetch components and data source diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED index a7b3c8870c..a7a4e5870f 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/Bundle.properties-MERGED @@ -6,19 +6,6 @@ AnalysisPanel_keywordHits_tabName=Keyword Hits AnalysisPanel_keywordSearchModuleName=Keyword Search BaseDataSourceSummaryPanel_goToArtifact=View Source Result BaseDataSourceSummaryPanel_goToFile=View Source File in Directory -ContainerPanel_export_acquisitionDetails=Acquisition Details: -ContainerPanel_export_deviceId=Device ID: -ContainerPanel_export_displayName=Display Name: -ContainerPanel_export_filePaths=File Paths: -ContainerPanel_export_imageType=Image Type: -ContainerPanel_export_md5=MD5: -ContainerPanel_export_originalName=Name: -ContainerPanel_export_sectorSize=Sector Size: 
-ContainerPanel_export_sha1=SHA1: -ContainerPanel_export_sha256=SHA256: -ContainerPanel_export_size=Size: -ContainerPanel_export_timeZone=Time Zone: -ContainerPanel_export_unallocatedSize=Unallocated Space: ContainerPanel_setFieldsForNonImageDataSource_na=N/A ContainerPanel_tabName=Container CTL_DataSourceSummaryAction=Data Source Summary @@ -62,7 +49,6 @@ DataSourceSummaryNode.column.type.header=Type DataSourceSummaryNode.viewDataSourceAction.text=Go to Data Source DataSourceSummaryTabbedPane_analysisTab_title=Analysis DataSourceSummaryTabbedPane_detailsTab_title=Container -DataSourceSummaryTabbedPane_exportTab_title=Export DataSourceSummaryTabbedPane_geolocationTab_title=Geolocation DataSourceSummaryTabbedPane_ingestHistoryTab_title=Ingest History DataSourceSummaryTabbedPane_pastCasesTab_title=Past Cases @@ -70,45 +56,27 @@ DataSourceSummaryTabbedPane_recentFileTab_title=Recent Files DataSourceSummaryTabbedPane_timelineTab_title=Timeline DataSourceSummaryTabbedPane_typesTab_title=Types DataSourceSummaryTabbedPane_userActivityTab_title=User Activity -ExcelExportAction_exportToXLSX_beginExport=Beginning Export... -# {0} - tabName -ExcelExportAction_exportToXLSX_gatheringTabData=Fetching Data for {0} Tab... -ExcelExportAction_exportToXLSX_writingToFile=Writing to File... -ExcelExportAction_getXLSXPath_directory=DataSourceSummary -ExcelExportAction_moduleName=Data Source Summary -ExcelExportAction_runXLSXExport_errorMessage=There was an error while exporting. -ExcelExportAction_runXLSXExport_errorTitle=Error While Exporting -ExcelExportAction_runXLSXExport_progressCancelActionTitle=Cancelling... 
-ExcelExportAction_runXLSXExport_progressCancelTitle=Cancel -# {0} - dataSource -ExcelExportAction_runXLSXExport_progressTitle=Exporting {0} to XLSX -ExcelExportDialog_title=Data Source Summary Exported +DataSourceUserActivitySummary_getRecentAccounts_calllogMessage=Call Log +DataSourceUserActivitySummary_getRecentAccounts_emailMessage=Email Message GeolocationPanel_cityColumn_title=Closest City GeolocationPanel_countColumn_title=Count GeolocationPanel_mostCommon_tabName=Most Common Cities GeolocationPanel_mostRecent_tabName=Most Recent Cities GeolocationPanel_onNoCrIngest_message=No results will be shown because the GPX Parser was not run. GeolocationPanel_unknownRow_title=Unknown -IngestJobExcelExport_endTimeColumn=End Time -IngestJobExcelExport_ingestStatusTimeColumn=Ingest Status -IngestJobExcelExport_moduleNameTimeColumn=Module Name -IngestJobExcelExport_sheetName=Ingest History -IngestJobExcelExport_startTimeColumn=Start Time -IngestJobExcelExport_versionColumn=Module Version PastCasesPanel_caseColumn_title=Case PastCasesPanel_countColumn_title=Count PastCasesPanel_notableFileTable_tabName=Cases with Common Notable PastCasesPanel_onNoCrIngest_message=No results will be shown because the Central Repository module was not run. PastCasesPanel_sameIdsTable_tabName=Past Cases with the Same Devices -RecentFilePanel_col_header_domain=Domain -RecentFilePanel_col_header_path=Path -RecentFilePanel_col_header_sender=Sender -RecentFilePanel_emailParserModuleName=Email Parser -RecentFilePanel_no_open_documents=No recently open documents found. RecentFilesPanel_attachmentsTable_tabName=Recent Attachments RecentFilesPanel_col_head_date=Date +RecentFilesPanel_col_header_domain=Domain +RecentFilesPanel_col_header_path=Path +RecentFilesPanel_col_header_sender=Sender RecentFilesPanel_docsTable_tabName=Recently Opened Documents RecentFilesPanel_downloadsTable_tabName=Recently Downloads +RecentFilesPanel_no_open_documents=No recently open documents found. 
SizeRepresentationUtil_units_bytes=bytes SizeRepresentationUtil_units_gigabytes=GB SizeRepresentationUtil_units_kilobytes=KB @@ -116,12 +84,6 @@ SizeRepresentationUtil_units_megabytes=MB SizeRepresentationUtil_units_petabytes=PB SizeRepresentationUtil_units_terabytes=TB TimelinePanel_earliestLabel_title=Earliest -TimelinePanel_getExports_activityRange=Activity Range -TimelinePanel_getExports_chartName=Last 30 Days -TimelinePanel_getExports_dateColumnHeader=Date -TimelinePanel_getExports_earliest=Earliest: -TimelinePanel_getExports_latest=Latest: -TimelinePanel_getExports_sheetName=Timeline TimelinePanel_latestLabel_title=Latest TimlinePanel_last30DaysChart_artifactEvts_title=Result Events TimlinePanel_last30DaysChart_fileEvts_title=File Events diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ContainerPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ContainerPanel.java index a8e5fef64e..9278b3e349 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ContainerPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ContainerPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,39 +19,23 @@ package org.sleuthkit.autopsy.datasourcesummary.ui; import java.beans.PropertyChangeEvent; -import java.sql.SQLException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; -import java.util.stream.Collectors; -import java.util.stream.Stream; import org.sleuthkit.autopsy.coreutils.Logger; import javax.swing.table.DefaultTableModel; -import org.apache.commons.lang.StringUtils; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; -import static org.sleuthkit.autopsy.datasourcesummary.ui.BaseDataSourceSummaryPanel.getFetchResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary.ContainerDetails; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary.ImageDetails; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult.ResultType; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultUpdateGovernor; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.ExcelItemExportable; -import 
org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.KeyValueItemExportable; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.SingleCellExportable; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.TitledExportable; import org.sleuthkit.autopsy.datasourcesummary.uiutils.UpdateGovernor; import org.sleuthkit.datamodel.DataSource; -import org.sleuthkit.datamodel.Image; -import org.sleuthkit.datamodel.TskCoreException; /** * Panel to display additional details associated with a specific DataSource @@ -61,182 +45,6 @@ import org.sleuthkit.datamodel.TskCoreException; }) class ContainerPanel extends BaseDataSourceSummaryPanel { - /** - * View model data for data source images. - */ - private static class ImageViewModel { - - private final long unallocatedSize; - private final long size; - private final long sectorSize; - - private final String timeZone; - private final String imageType; - - private final List paths; - private final String md5Hash; - private final String sha1Hash; - private final String sha256Hash; - - /** - * Main constructor. - * - * @param unallocatedSize Size in bytes of unallocated space. - * @param size Total size in bytes. - * @param sectorSize Sector size in bytes. - * @param timeZone The time zone. - * @param imageType The type of image. - * @param paths The source paths for the image. - * @param md5Hash The md5 hash or null. - * @param sha1Hash The sha1 hash or null. - * @param sha256Hash The sha256 hash or null. - */ - ImageViewModel(long unallocatedSize, long size, long sectorSize, - String timeZone, String imageType, List paths, String md5Hash, - String sha1Hash, String sha256Hash) { - this.unallocatedSize = unallocatedSize; - this.size = size; - this.sectorSize = sectorSize; - this.timeZone = timeZone; - this.imageType = imageType; - this.paths = paths == null ? 
Collections.emptyList() : new ArrayList<>(paths); - this.md5Hash = md5Hash; - this.sha1Hash = sha1Hash; - this.sha256Hash = sha256Hash; - } - - /** - * @return Size in bytes of unallocated space. - */ - long getUnallocatedSize() { - return unallocatedSize; - } - - /** - * @return Total size in bytes. - */ - long getSize() { - return size; - } - - /** - * @return Sector size in bytes. - */ - long getSectorSize() { - return sectorSize; - } - - /** - * @return The time zone. - */ - String getTimeZone() { - return timeZone; - } - - /** - * @return The type of image. - */ - String getImageType() { - return imageType; - } - - /** - * @return The source paths for the image. - */ - List getPaths() { - return paths; - } - - /** - * @return The md5 hash or null. - */ - String getMd5Hash() { - return md5Hash; - } - - /** - * @return The sha1 hash or null. - */ - String getSha1Hash() { - return sha1Hash; - } - - /** - * @return The sha256 hash or null. - */ - String getSha256Hash() { - return sha256Hash; - } - } - - /** - * View model for container data. - */ - private static class ContainerViewModel { - - private final String displayName; - private final String originalName; - private final String deviceIdValue; - private final String acquisitionDetails; - private final ImageViewModel imageViewModel; - - /** - * Main constructor. - * - * @param displayName The display name for this data source. - * @param originalName The original name for this data source. - * @param deviceIdValue The device id value for this data source. - * @param acquisitionDetails The acquisition details for this data - * source or null. - * @param imageViewModel If the data source is an image, the image view - * model for this data source or null if non-image. 
- */ - ContainerViewModel(String displayName, String originalName, String deviceIdValue, - String acquisitionDetails, ImageViewModel imageViewModel) { - this.displayName = displayName; - this.originalName = originalName; - this.deviceIdValue = deviceIdValue; - this.acquisitionDetails = acquisitionDetails; - this.imageViewModel = imageViewModel; - } - - /** - * @return The display name for this data source. - */ - String getDisplayName() { - return displayName; - } - - /** - * @return The original name for this data source. - */ - String getOriginalName() { - return originalName; - } - - /** - * @return The device id value for this data source. - */ - String getDeviceId() { - return deviceIdValue; - } - - /** - * @return The acquisition details for this data source or null. - */ - String getAcquisitionDetails() { - return acquisitionDetails; - } - - /** - * @return If the data source is an image, the image view model for this - * data source or null if non-image. - */ - ImageViewModel getImageViewModel() { - return imageViewModel; - } - } - // set of case events for which to call update (if the name changes, that will impact data shown) private static final Set CASE_EVENT_SET = new HashSet<>(Arrays.asList( Case.Events.DATA_SOURCE_NAME_CHANGED @@ -262,29 +70,29 @@ class ContainerPanel extends BaseDataSourceSummaryPanel { private static final Logger logger = Logger.getLogger(ContainerPanel.class.getName()); private final List> dataFetchComponents; - private final DataFetcher containerDataFetcher; + private final DataFetcher containerDataFetcher; /** * Creates a new form ContainerPanel. */ ContainerPanel() { - this(new ContainerSummary()); + this(new ContainerSummaryGetter()); } /** * Creates new form ContainerPanel. 
*/ - ContainerPanel(ContainerSummary containerSummary) { + ContainerPanel(ContainerSummaryGetter containerSummary) { super(containerSummary, CONTAINER_UPDATES); - containerDataFetcher = (dataSource) -> getContainerViewModel(containerSummary, dataSource); + containerDataFetcher = (dataSource) -> containerSummary.getContainerDetails(dataSource); dataFetchComponents = Arrays.asList( new DataFetchComponents<>( containerDataFetcher, (result) -> { if (result != null && result.getResultType() == ResultType.SUCCESS) { - ContainerViewModel data = result.getData(); + ContainerDetails data = result.getData(); updateDetailsPanelData(data); } else { if (result == null) { @@ -313,92 +121,12 @@ class ContainerPanel extends BaseDataSourceSummaryPanel { fetchInformation(dataFetchComponents, dataSource); } - /** - * A means of retrieving data that could potentially throw an exception. - */ - private interface Retriever { - - /** - * Retrieves data of a certain type and possibly throws an exception. - * - * @return The data type. - * @throws TskCoreException - * @throws SleuthkitCaseProviderException - * @throws SQLException - */ - O retrieve() throws TskCoreException, SleuthkitCaseProviderException, SQLException; - } - - /** - * Retrieves data of a particular type and handles any exceptions that may - * be thrown by logging. - * - * @param retriever The retrieving function. - * @return The retrieved data. - */ - private static O retrieve(Retriever retriever) { - try { - return retriever.retrieve(); - } catch (TskCoreException | SleuthkitCaseProviderException | SQLException ex) { - logger.log(Level.WARNING, "Error while retrieving data.", ex); - return null; - } - } - - /** - * Generates a container view model object containing data to display about - * the data source. - * - * @param containerSummary The service providing data about the data source. - * @param ds The data source. - * @return The generated view model. 
- */ - private static ContainerViewModel getContainerViewModel(ContainerSummary containerSummary, DataSource ds) { - if (ds == null) { - return null; - } - - return new ContainerViewModel( - ds.getName(), - ds.getName(), - ds.getDeviceId(), - retrieve(() -> ds.getAcquisitionDetails()), - ds instanceof Image ? getImageViewModel(containerSummary, (Image) ds) : null - ); - } - - /** - * Generates an image view model object containing data to display about the - * image. - * - * @param containerSummary The service providing data about the image. - * @param image The image. - * @return The generated view model. - */ - private static ImageViewModel getImageViewModel(ContainerSummary containerSummary, Image image) { - if (image == null) { - return null; - } - - Long unallocSize = retrieve(() -> containerSummary.getSizeOfUnallocatedFiles(image)); - String imageType = image.getType().getName(); - Long size = image.getSize(); - Long sectorSize = image.getSsize(); - String timeZone = image.getTimeZone(); - List paths = image.getPaths() == null ? Collections.emptyList() : Arrays.asList(image.getPaths()); - String md5 = retrieve(() -> image.getMd5()); - String sha1 = retrieve(() -> image.getSha1()); - String sha256 = retrieve(() -> image.getSha256()); - - return new ImageViewModel(unallocSize, size, sectorSize, timeZone, imageType, paths, md5, sha1, sha256); - } - /** * Update the swing components with fetched data. * * @param viewModel The data source view model data. 
*/ - private void updateDetailsPanelData(ContainerViewModel viewModel) { + private void updateDetailsPanelData(ContainerDetails viewModel) { clearTableValues(); if (viewModel == null) { return; @@ -409,8 +137,8 @@ class ContainerPanel extends BaseDataSourceSummaryPanel { deviceIdValue.setText(viewModel.getDeviceId()); acquisitionDetailsTextArea.setText(viewModel.getAcquisitionDetails()); - if (viewModel.getImageViewModel() != null) { - setFieldsForImage(viewModel.getImageViewModel()); + if (viewModel.getImageDetails() != null) { + setFieldsForImage(viewModel.getImageDetails()); } else { setFieldsForNonImageDataSource(); } @@ -445,7 +173,7 @@ class ContainerPanel extends BaseDataSourceSummaryPanel { * * @param viewModel The image view model data. */ - private void setFieldsForImage(ImageViewModel viewModel) { + private void setFieldsForImage(ImageDetails viewModel) { unallocatedSizeValue.setText(SizeRepresentationUtil.getSizeString(viewModel.getUnallocatedSize())); imageTypeValue.setText(viewModel.getImageType()); sizeValue.setText(SizeRepresentationUtil.getSizeString(viewModel.getSize())); @@ -480,84 +208,6 @@ class ContainerPanel extends BaseDataSourceSummaryPanel { ((DefaultTableModel) filePathsTable.getModel()).setRowCount(0); } - /** - * Divides acquisition details into key/value pairs to be displayed in - * separate cells in an excel export. - * - * @param acquisitionDetails The acquisition details. - * @return The list of key value pairs that can be incorporated into the - * excel export. - */ - private static List getAcquisitionDetails(String acquisitionDetails) { - if (StringUtils.isBlank(acquisitionDetails)) { - return Collections.emptyList(); - } else { - return Stream.of(acquisitionDetails.split("\\r?\\n")) - .map((line) -> (StringUtils.isBlank(line)) ? 
null : new SingleCellExportable(line)) - .filter(item -> item != null) - .collect(Collectors.toList()); - } - } - - @Override - @Messages({ - "ContainerPanel_export_displayName=Display Name:", - "ContainerPanel_export_originalName=Name:", - "ContainerPanel_export_deviceId=Device ID:", - "ContainerPanel_export_timeZone=Time Zone:", - "ContainerPanel_export_acquisitionDetails=Acquisition Details:", - "ContainerPanel_export_imageType=Image Type:", - "ContainerPanel_export_size=Size:", - "ContainerPanel_export_sectorSize=Sector Size:", - "ContainerPanel_export_md5=MD5:", - "ContainerPanel_export_sha1=SHA1:", - "ContainerPanel_export_sha256=SHA256:", - "ContainerPanel_export_unallocatedSize=Unallocated Space:", - "ContainerPanel_export_filePaths=File Paths:",}) - protected List getExports(DataSource ds) { - ContainerViewModel result = getFetchResult(containerDataFetcher, "Container sheets", ds); - if (ds == null || result == null) { - return Collections.emptyList(); - } - - String NA = Bundle.ContainerPanel_setFieldsForNonImageDataSource_na(); - DefaultCellModel NACell = new DefaultCellModel<>(NA); - - ImageViewModel imageModel = result.getImageViewModel(); - boolean hasImage = imageModel != null; - - DefaultCellModel timeZone = hasImage ? new DefaultCellModel<>(imageModel.getTimeZone()) : NACell; - DefaultCellModel imageType = hasImage ? new DefaultCellModel<>(imageModel.getImageType()) : NACell; - DefaultCellModel size = hasImage ? SizeRepresentationUtil.getBytesCell(imageModel.getSize()) : NACell; - DefaultCellModel sectorSize = hasImage ? SizeRepresentationUtil.getBytesCell(imageModel.getSectorSize()) : NACell; - DefaultCellModel md5 = hasImage ? new DefaultCellModel<>(imageModel.getMd5Hash()) : NACell; - DefaultCellModel sha1 = hasImage ? new DefaultCellModel<>(imageModel.getSha1Hash()) : NACell; - DefaultCellModel sha256 = hasImage ? new DefaultCellModel<>(imageModel.getSha256Hash()) : NACell; - DefaultCellModel unallocatedSize = hasImage ? 
SizeRepresentationUtil.getBytesCell(imageModel.getUnallocatedSize()) : NACell; - List paths = result.getImageViewModel() == null ? Collections.singletonList(NA) : result.getImageViewModel().getPaths(); - List cellPaths = paths.stream() - .map(SingleCellExportable::new) - .collect(Collectors.toList()); - - return Arrays.asList( - new ExcelSpecialFormatExport(Bundle.ContainerPanel_tabName(), Arrays.asList( - new KeyValueItemExportable(Bundle.ContainerPanel_export_displayName(), new DefaultCellModel<>(result.getDisplayName())), - new KeyValueItemExportable(Bundle.ContainerPanel_export_originalName(), new DefaultCellModel<>(result.getOriginalName())), - new KeyValueItemExportable(Bundle.ContainerPanel_export_deviceId(), new DefaultCellModel<>(result.getDeviceId())), - new KeyValueItemExportable(Bundle.ContainerPanel_export_timeZone(), timeZone), - new TitledExportable(Bundle.ContainerPanel_export_acquisitionDetails(), getAcquisitionDetails(result.getAcquisitionDetails())), - new KeyValueItemExportable(Bundle.ContainerPanel_export_imageType(), imageType), - new KeyValueItemExportable(Bundle.ContainerPanel_export_size(), size), - new KeyValueItemExportable(Bundle.ContainerPanel_export_sectorSize(), sectorSize), - new KeyValueItemExportable(Bundle.ContainerPanel_export_md5(), md5), - new KeyValueItemExportable(Bundle.ContainerPanel_export_sha1(), sha1), - new KeyValueItemExportable(Bundle.ContainerPanel_export_sha256(), sha256), - new KeyValueItemExportable(Bundle.ContainerPanel_export_unallocatedSize(), unallocatedSize), - new TitledExportable(Bundle.ContainerPanel_export_filePaths(), cellPaths) - ))); - - } - /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. 
The content of this method is always diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ContainerSummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ContainerSummaryGetter.java new file mode 100644 index 0000000000..41be8c4644 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ContainerSummaryGetter.java @@ -0,0 +1,140 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020-2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Wrapper class for converting + * org.sleuthkit.autopsy.contentutils.ContainerSummary functionality into a + * DefaultArtifactUpdateGovernor used by Container tab. 
+ */ +public class ContainerSummaryGetter implements DefaultArtifactUpdateGovernor { + + private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( + BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_INFO.getTypeID(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE.getTypeID() + )); + + private final ContainerSummary containerSummary; + + /** + * Main constructor. + */ + public ContainerSummaryGetter() { + containerSummary = new ContainerSummary(); + } + + @Override + public boolean isRefreshRequired(ModuleContentEvent evt) { + return true; + } + + @Override + public boolean isRefreshRequired(AbstractFile file) { + return true; + } + + @Override + public Set getArtifactTypeIdsForRefresh() { + return Collections.unmodifiableSet(ARTIFACT_UPDATE_TYPE_IDS); + } + + /** + * Gets the size of unallocated files in a particular datasource. + * + * @param currentDataSource The data source. + * + * @return The size or null if the query could not be executed. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getSizeOfUnallocatedFiles(DataSource currentDataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return containerSummary.getSizeOfUnallocatedFiles(currentDataSource); + } + + /** + * Retrieves the concatenation of operating system attributes for a + * particular data source. + * + * @param dataSource The data source. + * + * @return The concatenated value or null if the query could not be + * executed. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public String getOperatingSystems(DataSource dataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return containerSummary.getOperatingSystems(dataSource); + } + + /** + * Retrieves the concatenation of data source usage for a particular data + * source. 
+ * + * @param dataSource The data source. + * + * @return The concatenated value or null if the query could not be + * executed. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public String getDataSourceType(DataSource dataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return containerSummary.getDataSourceType(dataSource); + } + + /** + * Retrieves a container data model object containing data about the data + * source. + * + * @param dataSource The data source. + * + * @return The concatenated value or null if the query could not be + * executed. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public ContainerSummary.ContainerDetails getContainerDetails(DataSource dataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return containerSummary.getContainerDetails(dataSource); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceBrowser.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceBrowser.java index 51433534e4..e6942058e9 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceBrowser.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceBrowser.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.datasourcesummary.ui; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.CaseDataSourcesSummary; import java.awt.Cursor; import org.sleuthkit.autopsy.datasourcesummary.uiutils.RightAlignedTableCellRenderer; import java.awt.EventQueue; @@ -42,7 +43,6 @@ import static javax.swing.SwingConstants.RIGHT; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import javax.swing.table.TableColumn; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.CaseDataSourcesSummary; import org.sleuthkit.datamodel.DataSource; import 
org.sleuthkit.datamodel.IngestJobInfo; import org.sleuthkit.datamodel.SleuthkitCase; diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java index 146863b58e..af53ab9260 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/DataSourceSummaryTabbedPane.java @@ -25,12 +25,9 @@ import java.util.Arrays; import java.util.EnumSet; import java.util.List; import java.util.function.Consumer; -import java.util.function.Function; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.IngestJobInfoPanel; -import org.sleuthkit.autopsy.datasourcesummary.ui.ExcelExportAction.ExportableTab; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; import org.sleuthkit.datamodel.DataSource; /** @@ -46,8 +43,7 @@ import org.sleuthkit.datamodel.DataSource; "DataSourceSummaryTabbedPane_pastCasesTab_title=Past Cases", "DataSourceSummaryTabbedPane_analysisTab_title=Analysis", "DataSourceSummaryTabbedPane_geolocationTab_title=Geolocation", - "DataSourceSummaryTabbedPane_timelineTab_title=Timeline", - "DataSourceSummaryTabbedPane_exportTab_title=Export" + "DataSourceSummaryTabbedPane_timelineTab_title=Timeline" }) public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { @@ -55,12 +51,11 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { * Records of tab information (i.e. title, component, function to call on * new data source). 
*/ - private class DataSourceTab implements ExportableTab { + private class DataSourceTab { private final String tabTitle; private final Component component; private final Consumer onDataSource; - private final Function> excelExporter; private final Runnable onClose; private final Runnable onInit; @@ -71,7 +66,7 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { * @param panel The component to be displayed in the tab. */ DataSourceTab(String tabTitle, BaseDataSourceSummaryPanel panel) { - this(tabTitle, panel, panel::setDataSource, panel::getExports, panel::close, panel::init); + this(tabTitle, panel, panel::setDataSource, panel::close, panel::init); panel.setParentCloseListener(() -> notifyParentClose()); } @@ -90,12 +85,10 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { * added to the tabbed pane. */ DataSourceTab(String tabTitle, Component component, Consumer onDataSource, - Function> excelExporter, Runnable onClose, - Runnable onInit) { + Runnable onClose, Runnable onInit) { this.tabTitle = tabTitle; this.component = component; this.onDataSource = onDataSource; - this.excelExporter = excelExporter; this.onClose = onClose; this.onInit = onInit; } @@ -103,7 +96,6 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { /** * @return The title for the tab. */ - @Override public String getTabTitle() { return tabTitle; } @@ -122,11 +114,6 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { return onDataSource; } - @Override - public List getExcelExports(DataSource dataSource) { - return excelExporter == null ? null : excelExporter.apply(dataSource); - } - /** * @return The action for closing resources in the tab. 
*/ @@ -152,9 +139,6 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { private Runnable notifyParentClose = null; private final IngestJobInfoPanel ingestHistoryPanel = new IngestJobInfoPanel(); - // create an export panel whose button triggers the export to XLSX action - private final ExportPanel exportPanel = new ExportPanel(); - private final List tabs = Arrays.asList( new DataSourceTab(Bundle.DataSourceSummaryTabbedPane_typesTab_title(), new TypesPanel()), new DataSourceTab(Bundle.DataSourceSummaryTabbedPane_userActivityTab_title(), new UserActivityPanel()), @@ -168,22 +152,11 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { Bundle.DataSourceSummaryTabbedPane_ingestHistoryTab_title(), ingestHistoryPanel, ingestHistoryPanel::setDataSource, - IngestJobExcelExport::getExports, null, null), - new DataSourceTab(Bundle.DataSourceSummaryTabbedPane_detailsTab_title(), new ContainerPanel()), - new DataSourceTab( - Bundle.DataSourceSummaryTabbedPane_exportTab_title(), - exportPanel, - null, - null, - null, - null) + new DataSourceTab(Bundle.DataSourceSummaryTabbedPane_detailsTab_title(), new ContainerPanel()) ); - // the action that does the export - private final ExcelExportAction exportAction = new ExcelExportAction(tabs); - private DataSource dataSource = null; private CardLayout cardLayout; @@ -243,9 +216,6 @@ public class DataSourceSummaryTabbedPane extends javax.swing.JPanel { // set this to no datasource initially cardLayout.show(this, NO_DATASOURCE_PANE); - - // set action for when user requests xlsx export - exportPanel.setXlsxExportAction(() -> exportAction.accept(getDataSource())); } /** diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportAction.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportAction.java deleted file mode 100644 index 22e06c07ed..0000000000 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportAction.java +++ /dev/null @@ -1,301 +0,0 @@ 
-/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.datasourcesummary.ui; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.io.File; -import java.io.IOException; -import java.nio.file.Paths; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.concurrent.CancellationException; -import java.util.concurrent.ExecutionException; -import java.util.function.Consumer; -import java.util.logging.Level; -import javax.swing.JOptionPane; -import javax.swing.SwingUtilities; -import javax.swing.SwingWorker; -import org.openide.util.NbBundle; -import org.openide.util.NbBundle.Messages; -import org.openide.windows.WindowManager; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.coreutils.FileUtil; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelExportException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; -import org.sleuthkit.autopsy.progress.ModalDialogProgressIndicator; -import 
org.sleuthkit.autopsy.progress.ProgressIndicator; -import org.sleuthkit.datamodel.DataSource; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Action that exports tab data to an excel workbook. - */ -@Messages({ - "ExcelExportAction_moduleName=Data Source Summary",}) -class ExcelExportAction implements Consumer { - - private static final Logger logger = Logger.getLogger(ExcelExportAction.class.getName()); - - /** - * A tab that can be exported. - */ - interface ExportableTab { - - /** - * Returns the name of the tab. - * - * @return The tab name. - */ - String getTabTitle(); - - /** - * Given the data source, provides the excel exports for this tab. - * - * @param dataSource The data source. - * @return The excel exports or null. - */ - List getExcelExports(DataSource dataSource); - } - - private final ExcelExport excelExport = ExcelExport.getInstance(); - private final List tabExports; - - /** - * Main constructor. - * - * @param tabExports The different tabs that may have excel exports. - */ - ExcelExportAction(List tabExports) { - this.tabExports = Collections.unmodifiableList(new ArrayList<>(tabExports)); - } - - /** - * Accepts the data source for which this export pertains, prompts user for - * output location, and exports the data. - * - * @param ds The data source. - */ - @Override - public void accept(DataSource ds) { - if (ds == null) { - return; - } - - File outputLoc = getXLSXPath(ds.getName()); - if (outputLoc == null) { - return; - } - - runXLSXExport(ds, outputLoc); - } - - /** - * Generates an xlsx path for the data source summary export. - * - * @param dataSourceName The name of the data source. - * @return The file to which the excel document should be written or null if - * file already exists or cancellation. 
- */ - @NbBundle.Messages({ - "ExcelExportAction_getXLSXPath_directory=DataSourceSummary",}) - private File getXLSXPath(String dataSourceName) { - // set initial path to reports directory with filename that is - // a combination of the data source name and time stamp - DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); - String fileName = String.format("%s-%s.xlsx", dataSourceName == null ? "" : FileUtil.escapeFileName(dataSourceName), dateFormat.format(new Date())); - try { - String reportsDir = Case.getCurrentCaseThrows().getReportDirectory(); - File reportsDirFile = Paths.get(reportsDir, Bundle.ExcelExportAction_getXLSXPath_directory()).toFile(); - if (!reportsDirFile.exists()) { - reportsDirFile.mkdirs(); - } - - return Paths.get(reportsDirFile.getAbsolutePath(), fileName).toFile(); - } catch (NoCurrentCaseException ex) { - logger.log(Level.WARNING, "Unable to find reports directory.", ex); - } - - return null; - } - - /** - * An action listener that handles cancellation of the export process. - */ - private class CancelExportListener implements ActionListener { - - private SwingWorker worker = null; - - @Override - public void actionPerformed(ActionEvent e) { - if (worker != null && !worker.isCancelled() && !worker.isDone()) { - worker.cancel(true); - } - } - - /** - * Returns the swing worker that could be cancelled. - * - * @return The swing worker that could be cancelled. - */ - SwingWorker getWorker() { - return worker; - } - - /** - * Sets the swing worker that could be cancelled. - * - * @param worker The swing worker that could be cancelled. - */ - void setWorker(SwingWorker worker) { - this.worker = worker; - } - } - - /** - * Handles managing the gui and exporting data from the tabs into an excel - * document. - * - * @param dataSource The data source. - * @param path The output path. 
- */ - @NbBundle.Messages({ - "# {0} - dataSource", - "ExcelExportAction_runXLSXExport_progressTitle=Exporting {0} to XLSX", - "ExcelExportAction_runXLSXExport_progressCancelTitle=Cancel", - "ExcelExportAction_runXLSXExport_progressCancelActionTitle=Cancelling...", - "ExcelExportAction_runXLSXExport_errorTitle=Error While Exporting", - "ExcelExportAction_runXLSXExport_errorMessage=There was an error while exporting.", - }) - private void runXLSXExport(DataSource dataSource, File path) { - - CancelExportListener cancelButtonListener = new CancelExportListener(); - - ProgressIndicator progressIndicator = new ModalDialogProgressIndicator( - WindowManager.getDefault().getMainWindow(), - Bundle.ExcelExportAction_runXLSXExport_progressTitle(dataSource.getName()), - new String[]{Bundle.ExcelExportAction_runXLSXExport_progressCancelTitle()}, - Bundle.ExcelExportAction_runXLSXExport_progressCancelTitle(), - cancelButtonListener - ); - - SwingWorker worker = new SwingWorker() { - @Override - protected Boolean doInBackground() throws Exception { - exportToXLSX(progressIndicator, dataSource, path); - return true; - } - - @Override - protected void done() { - try { - get(); - } catch (ExecutionException ex) { - logger.log(Level.WARNING, "Error while trying to export data source summary to xlsx.", ex); - JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), - Bundle.ExcelExportAction_runXLSXExport_errorMessage(), - Bundle.ExcelExportAction_runXLSXExport_errorTitle(), - JOptionPane.ERROR_MESSAGE); - } catch (InterruptedException | CancellationException ex) { - // no op on cancellation - } finally { - progressIndicator.finish(); - } - } - }; - - cancelButtonListener.setWorker(worker); - worker.execute(); - } - - /** - * Action that handles updating progress and exporting data from the tabs. - * - * @param progressIndicator The progress indicator. - * @param dataSource The data source to be exported. - * @param path The path of the excel export. 
- * @throws InterruptedException - * @throws IOException - * @throws ExcelExportException - */ - @NbBundle.Messages({ - "ExcelExportAction_exportToXLSX_beginExport=Beginning Export...", - "# {0} - tabName", - "ExcelExportAction_exportToXLSX_gatheringTabData=Fetching Data for {0} Tab...", - "ExcelExportAction_exportToXLSX_writingToFile=Writing to File...",}) - - private void exportToXLSX(ProgressIndicator progressIndicator, DataSource dataSource, File path) - throws InterruptedException, IOException, ExcelExport.ExcelExportException { - - int exportWeight = 3; - int totalWeight = tabExports.size() + exportWeight; - progressIndicator.start(Bundle.ExcelExportAction_exportToXLSX_beginExport(), totalWeight); - List sheetExports = new ArrayList<>(); - for (int i = 0; i < tabExports.size(); i++) { - if (Thread.interrupted()) { - throw new InterruptedException("Export has been cancelled."); - } - - ExportableTab tab = tabExports.get(i); - progressIndicator.progress(Bundle.ExcelExportAction_exportToXLSX_gatheringTabData(tab == null ? 
"" : tab.getTabTitle()), i); - - List exports = tab.getExcelExports(dataSource); - if (exports != null) { - sheetExports.addAll(exports); - } - } - - if (Thread.interrupted()) { - throw new InterruptedException("Export has been cancelled."); - } - - progressIndicator.progress(Bundle.ExcelExportAction_exportToXLSX_writingToFile(), tabExports.size()); - excelExport.writeExcel(sheetExports, path); - - progressIndicator.finish(); - - try { - // add to reports - Case curCase = Case.getCurrentCaseThrows(); - curCase.addReport(path.getParent(), - Bundle.ExcelExportAction_moduleName(), - path.getName(), - dataSource); - - // and show finished dialog - SwingUtilities.invokeLater(() -> { - ExcelExportDialog dialog = new ExcelExportDialog(WindowManager.getDefault().getMainWindow(), path); - dialog.setResizable(false); - dialog.setLocationRelativeTo(WindowManager.getDefault().getMainWindow()); - dialog.setVisible(true); - dialog.toFront(); - }); - - } catch (NoCurrentCaseException | TskCoreException ex) { - logger.log(Level.WARNING, "There was an error attaching report to case.", ex); - } - } -} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportDialog.form b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportDialog.form deleted file mode 100644 index 8342ce5326..0000000000 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportDialog.form +++ /dev/null @@ -1,114 +0,0 @@ - - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportDialog.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportDialog.java deleted file mode 100644 index ec16d08e46..0000000000 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExcelExportDialog.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.datasourcesummary.ui; - -import java.awt.Cursor; -import java.awt.Desktop; -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; -import java.io.File; -import java.io.IOException; -import java.util.logging.Level; -import javax.swing.SwingUtilities; -import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.coreutils.Logger; - -/** - * Dialog showing where the data source summary excel export can be located. 
- */ -@Messages({ - "ExcelExportDialog_title=Data Source Summary Exported" -}) -public class ExcelExportDialog extends javax.swing.JDialog { - - private static final Logger logger = Logger.getLogger(ExcelExportDialog.class.getName()); - - /** - * Creates new form ExcelExportDialog - */ - public ExcelExportDialog(java.awt.Frame parent, File filePath) { - super(parent, true); - - initComponents(); - setTitle(Bundle.ExcelExportDialog_title()); - - this.linkText.setText(filePath.getAbsolutePath()); - this.linkText.addMouseListener(new MouseAdapter() { - @Override - public void mouseClicked(MouseEvent e) { - SwingUtilities.invokeLater(() -> { - try { - Desktop.getDesktop().open(filePath); - } catch (IOException ex) { - logger.log(Level.WARNING, "Unable to open: " + filePath.getAbsolutePath(), ex); - } - }); - } - - }); - this.linkText.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); - } - - /** - * This method is called from within the constructor to initialize the form. - * WARNING: Do NOT modify this code. The content of this method is always - * regenerated by the Form Editor. 
- */ - @SuppressWarnings("unchecked") - // //GEN-BEGIN:initComponents - private void initComponents() { - - javax.swing.JLabel titleLabel = new javax.swing.JLabel(); - javax.swing.JButton okButton = new javax.swing.JButton(); - javax.swing.JScrollPane linkTextScrollPane = new javax.swing.JScrollPane(); - linkText = new javax.swing.JTextArea(); - - setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); - - org.openide.awt.Mnemonics.setLocalizedText(titleLabel, org.openide.util.NbBundle.getMessage(ExcelExportDialog.class, "ExcelExportDialog.titleLabel.text")); // NOI18N - - org.openide.awt.Mnemonics.setLocalizedText(okButton, org.openide.util.NbBundle.getMessage(ExcelExportDialog.class, "ExcelExportDialog.okButton.text")); // NOI18N - okButton.addActionListener(new java.awt.event.ActionListener() { - public void actionPerformed(java.awt.event.ActionEvent evt) { - okButtonActionPerformed(evt); - } - }); - - linkText.setEditable(false); - linkText.setBackground(null); - linkText.setColumns(20); - linkText.setForeground(java.awt.Color.BLUE); - linkText.setLineWrap(true); - linkText.setRows(1); - linkText.setWrapStyleWord(true); - linkText.setBorder(null); - linkText.setOpaque(false); - linkTextScrollPane.setViewportView(linkText); - - javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); - getContentPane().setLayout(layout); - layout.setHorizontalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) - .addComponent(linkTextScrollPane) - .addGroup(layout.createSequentialGroup() - .addGap(0, 0, Short.MAX_VALUE) - .addComponent(okButton)) - .addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup() - .addComponent(titleLabel) - .addGap(0, 116, Short.MAX_VALUE))) - .addContainerGap()) - ); - 
layout.setVerticalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addComponent(titleLabel) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(linkTextScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 39, javax.swing.GroupLayout.PREFERRED_SIZE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(okButton) - .addContainerGap()) - ); - - pack(); - }// //GEN-END:initComponents - - private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed - dispose(); - }//GEN-LAST:event_okButtonActionPerformed - - - // Variables declaration - do not modify//GEN-BEGIN:variables - private javax.swing.JTextArea linkText; - // End of variables declaration//GEN-END:variables -} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExportPanel.form b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExportPanel.form deleted file mode 100644 index 908facb8d6..0000000000 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExportPanel.form +++ /dev/null @@ -1,68 +0,0 @@ - - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExportPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExportPanel.java deleted file mode 100644 index 7e635c3f64..0000000000 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/ExportPanel.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.datasourcesummary.ui; - -/** - * The panel that provides options for exporting data source summary data. - */ -public class ExportPanel extends javax.swing.JPanel { - - private Runnable xlsxExportAction; - - /** - * Creates new form ExportPanel - */ - public ExportPanel() { - initComponents(); - } - - /** - * Returns the action that handles exporting to excel. - * - * @return The action that handles exporting to excel. - */ - public Runnable getXlsxExportAction() { - return xlsxExportAction; - } - - /** - * Sets the action that handles exporting to excel. - * - * @param onXlsxExport The action that handles exporting to excel. - */ - public void setXlsxExportAction(Runnable onXlsxExport) { - this.xlsxExportAction = onXlsxExport; - } - - /** - * This method is called from within the constructor to initialize the form. - * WARNING: Do NOT modify this code. The content of this method is always - * regenerated by the Form Editor. 
- */ - @SuppressWarnings("unchecked") - // //GEN-BEGIN:initComponents - private void initComponents() { - - javax.swing.JButton xlsxExportButton = new javax.swing.JButton(); - javax.swing.JLabel xlsxExportMessage = new javax.swing.JLabel(); - - org.openide.awt.Mnemonics.setLocalizedText(xlsxExportButton, org.openide.util.NbBundle.getMessage(ExportPanel.class, "ExportPanel.xlsxExportButton.text")); // NOI18N - xlsxExportButton.addActionListener(new java.awt.event.ActionListener() { - public void actionPerformed(java.awt.event.ActionEvent evt) { - xlsxExportButtonActionPerformed(evt); - } - }); - - org.openide.awt.Mnemonics.setLocalizedText(xlsxExportMessage, org.openide.util.NbBundle.getMessage(ExportPanel.class, "ExportPanel.xlsxExportMessage.text")); // NOI18N - - javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); - this.setLayout(layout); - layout.setHorizontalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(xlsxExportMessage) - .addComponent(xlsxExportButton)) - .addContainerGap(62, Short.MAX_VALUE)) - ); - layout.setVerticalGroup( - layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addContainerGap() - .addComponent(xlsxExportMessage) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(xlsxExportButton) - .addContainerGap(250, Short.MAX_VALUE)) - ); - }// //GEN-END:initComponents - - private void xlsxExportButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_xlsxExportButtonActionPerformed - if (this.xlsxExportAction != null) { - xlsxExportAction.run(); - } - }//GEN-LAST:event_xlsxExportButtonActionPerformed - - - // Variables declaration - do not modify//GEN-BEGIN:variables - // End of variables declaration//GEN-END:variables -} diff 
--git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/GeolocationPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/GeolocationPanel.java index be7279afca..2551e7a3ae 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/GeolocationPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/GeolocationPanel.java @@ -34,7 +34,6 @@ import org.openide.util.NbBundle.Messages; import org.openide.util.actions.CallableSystemAction; import org.openide.windows.TopComponent; import org.openide.windows.WindowManager; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary; import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary.CityCountsList; import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary.CityData; import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary.CityRecordCount; @@ -43,9 +42,8 @@ import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel; import org.sleuthkit.autopsy.geolocation.GeoFilter; @@ -79,9 +77,9 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { * Main constructor. * * @param mostRecentData The data to be displayed in the most recent - * table. + * table. * @param mostCommonData The data to be displayed in the most common - * table. + * table. 
*/ GeolocationViewModel(List> mostRecentData, List> mostCommonData) { this.mostRecentData = mostRecentData; @@ -147,7 +145,7 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { private final IngestRunningLabel ingestRunningLabel = new IngestRunningLabel(); - private final GeolocationSummary whereUsedData; + private final GeolocationSummaryGetter whereUsedData; private final DataFetcher geolocationFetcher; @@ -155,15 +153,15 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { * Main constructor. */ public GeolocationPanel() { - this(new GeolocationSummary()); + this(new GeolocationSummaryGetter()); } /** * Main constructor. * - * @param whereUsedData The GeolocationSummary instance to use. + * @param whereUsedData The GeolocationSummaryGetter instance to use. */ - public GeolocationPanel(GeolocationSummary whereUsedData) { + public GeolocationPanel(GeolocationSummaryGetter whereUsedData) { super(whereUsedData); this.whereUsedData = whereUsedData; @@ -183,7 +181,7 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { * Means of rendering data to be shown in the tables. * * @param result The result of fetching data for a data source and - * processing into view model data. + * processing into view model data. */ private void handleData(DataFetchResult result) { showCityContent(DataFetchResult.getSubResult(result, (dr) -> dr.getMostCommonData()), mostCommonTable, commonViewInGeolocationBtn); @@ -194,6 +192,7 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { * Retrieves the city name to display from the record. * * @param record The record for the city to display. + * * @return The display name (city, country). */ private static String getCityName(CityRecord record) { @@ -221,6 +220,7 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { * formats the city name). * * @param cityCount The CityRecordCount representing a row. + * * @return The city/count pair to be displayed as a row. 
*/ private Pair formatRecord(CityRecordCount cityCount) { @@ -239,7 +239,8 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { * 'unknown'). * * @param countsList The CityCountsList object representing the data to be - * displayed in the table. + * displayed in the table. + * * @return The list of city/count tuples to be displayed as a row. */ private List> formatList(CityCountsList countsList) { @@ -263,10 +264,11 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { } /** - * Converts CityData from GeolocationSummary into data that can be directly - * put into table in this panel. + * Converts CityData from GeolocationSummaryGetter into data that can be + * directly put into table in this panel. * * @param cityData The city data. + * * @return The view model data. */ private GeolocationViewModel convertToViewModel(CityData cityData) { @@ -280,8 +282,8 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { /** * Shows data in a particular table. * - * @param result The result to be displayed in the table. - * @param table The table where the data will be displayed. + * @param result The result to be displayed in the table. + * @param table The table where the data will be displayed. * @param goToGeolocation The corresponding geolocation navigation button. */ private void showCityContent(DataFetchResult>> result, JTablePanel> table, JButton goToGeolocation) { @@ -296,9 +298,9 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { * Action to open the geolocation window. * * @param dataSource The data source for which the window should filter. - * @param daysLimit The limit for how recently the waypoints should be (for - * most recent table) or null for most recent filter to not be set (for most - * common table). + * @param daysLimit The limit for how recently the waypoints should be (for + * most recent table) or null for most recent filter to + * not be set (for most common table). 
*/ private void openGeolocationWindow(DataSource dataSource, Integer daysLimit) { // notify dialog (if in dialog) should close. @@ -349,19 +351,6 @@ public class GeolocationPanel extends BaseDataSourceSummaryPanel { onNewDataSource(dataFetchComponents, tables, dataSource); } - @Override - List getExports(DataSource dataSource) { - GeolocationViewModel model = getFetchResult(geolocationFetcher, "Geolocation sheets", dataSource); - if (model == null) { - return Collections.emptyList(); - } - - return Arrays.asList( - getTableExport(DEFAULT_TEMPLATE, Bundle.GeolocationPanel_mostRecent_tabName(), model.getMostRecentData()), - getTableExport(DEFAULT_TEMPLATE, Bundle.GeolocationPanel_mostCommon_tabName(), model.getMostCommonData()) - ); - } - @Override public void close() { ingestRunningLabel.unregister(); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/GeolocationSummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/GeolocationSummaryGetter.java new file mode 100644 index 0000000000..2278b4ac60 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/GeolocationSummaryGetter.java @@ -0,0 +1,78 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020-2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import java.io.IOException; +import java.util.List; +import java.util.Set; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary.CityData; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; +import org.sleuthkit.autopsy.geolocation.datamodel.GeoLocationDataException; +import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.DataSource; + +/** + * Wrapper class for converting + * org.sleuthkit.autopsy.contentutils.GeolocationSummary functionality into a + * DefaultArtifactUpdateGovernor used by GeolocationPanel tab. + */ +public class GeolocationSummaryGetter implements DefaultArtifactUpdateGovernor { + + private final GeolocationSummary geoSummary; + + /** + * Default constructor. + */ + public GeolocationSummaryGetter() { + geoSummary = new GeolocationSummary(); + } + + /** + * @return Returns all the geolocation artifact types. + */ + public List getGeoTypes() { + return GeolocationSummary.getGeoTypes(); + } + + @Override + public Set getArtifactTypeIdsForRefresh() { + return GeolocationSummary.getArtifactTypeIdsForRefresh(); + } + + /** + * Get this list of hits per city where the list is sorted descending by + * number of found hits (i.e. most hits is first index). + * + * @param dataSource The data source. + * @param daysCount Number of days to go back. + * @param maxCount Maximum number of results. + * + * @return The sorted list. 
+ * + * @throws SleuthkitCaseProviderException + * @throws GeoLocationDataException + * @throws InterruptedException + */ + public CityData getCityCounts(DataSource dataSource, int daysCount, int maxCount) + throws SleuthkitCaseProviderException, GeoLocationDataException, InterruptedException, IOException { + return geoSummary.getCityCounts(dataSource, daysCount, maxCount); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/MimeTypeSummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/MimeTypeSummaryGetter.java new file mode 100644 index 0000000000..5468ec668a --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/MimeTypeSummaryGetter.java @@ -0,0 +1,150 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020-2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultUpdateGovernor; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.MimeTypeSummary; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Wrapper class for converting org.sleuthkit.autopsy.contentutils.TypesSummary + * functionality into a DefaultArtifactUpdateGovernor used by TypesPanel tab. + */ +public class MimeTypeSummaryGetter implements DefaultUpdateGovernor { + + private static final Set INGEST_JOB_EVENTS = new HashSet<>( + Arrays.asList(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED)); + + private final MimeTypeSummary mimeTypeSummary; + + /** + * Main constructor. + */ + public MimeTypeSummaryGetter() { + mimeTypeSummary = new MimeTypeSummary(); + } + + @Override + public boolean isRefreshRequired(ModuleContentEvent evt) { + return true; + } + + @Override + public boolean isRefreshRequired(AbstractFile file) { + return true; + } + + @Override + public boolean isRefreshRequired(IngestManager.IngestJobEvent evt) { + return (evt != null && INGEST_JOB_EVENTS.contains(evt)); + } + + @Override + public Set getIngestJobEventUpdates() { + return Collections.unmodifiableSet(INGEST_JOB_EVENTS); + } + + /** + * Get the number of files in the case database for the current data source + * which have the specified mimetypes. 
+ * + * @param currentDataSource the data source which we are finding a file + * count + * + * @param setOfMimeTypes the set of mime types which we are finding the + * number of occurences of + * + * @return a Long value which represents the number of occurrences of the + * specified mime types in the current case for the specified data + * source, null if no count was retrieved + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfFilesForMimeTypes(DataSource currentDataSource, Set setOfMimeTypes) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return mimeTypeSummary.getCountOfFilesForMimeTypes(currentDataSource, setOfMimeTypes); + } + + /** + * Get the number of files in the case database for the current data source + * which do not have the specified mimetypes. + * + * @param currentDataSource the data source which we are finding a file + * count + * + * @param setOfMimeTypes the set of mime types that should be excluded. + * + * @return a Long value which represents the number of files that do not + * have the specific mime type, but do have a mime type. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfFilesNotInMimeTypes(DataSource currentDataSource, Set setOfMimeTypes) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return mimeTypeSummary.getCountOfFilesNotInMimeTypes(currentDataSource, setOfMimeTypes); + } + + /** + * Get a count of all regular files in a datasource. + * + * @param dataSource The datasource. + * + * @return The count of regular files. 
+ * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfAllRegularFiles(DataSource dataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return mimeTypeSummary.getCountOfAllRegularFiles(dataSource); + } + + /** + * Gets the number of files in the data source with no assigned mime type. + * + * @param currentDataSource The data source. + * + * @return The number of files with no mime type or null if there is an + * issue searching the data source. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfFilesWithNoMimeType(DataSource currentDataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return mimeTypeSummary.getCountOfFilesWithNoMimeType(currentDataSource); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/PastCasesPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/PastCasesPanel.java index a3169db375..08679169d3 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/PastCasesPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/PastCasesPanel.java @@ -19,21 +19,16 @@ package org.sleuthkit.autopsy.datasourcesummary.ui; import java.util.Arrays; -import java.util.Collections; import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.PastCasesSummary; import org.sleuthkit.autopsy.datasourcesummary.datamodel.PastCasesSummary.PastCasesResult; -import static org.sleuthkit.autopsy.datasourcesummary.ui.BaseDataSourceSummaryPanel.getFetchResult; -import static org.sleuthkit.autopsy.datasourcesummary.ui.BaseDataSourceSummaryPanel.getTableExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; import 
org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel; import org.sleuthkit.datamodel.DataSource; @@ -84,19 +79,19 @@ public class PastCasesPanel extends BaseDataSourceSummaryPanel { private final IngestRunningLabel ingestRunningLabel = new IngestRunningLabel(); private final DataFetcher pastCasesFetcher; - + public PastCasesPanel() { - this(new PastCasesSummary()); + this(new PastCasesSummaryGetter()); } /** * Creates new form PastCasesPanel */ - public PastCasesPanel(PastCasesSummary pastCaseData) { + public PastCasesPanel(PastCasesSummaryGetter pastCaseData) { super(pastCaseData); this.pastCasesFetcher = (dataSource) -> pastCaseData.getPastCasesData(dataSource); - + // set up data acquisition methods dataFetchComponents = Arrays.asList( new DataFetchWorker.DataFetchComponents<>( @@ -128,19 +123,6 @@ public class PastCasesPanel extends BaseDataSourceSummaryPanel { onNewDataSource(dataFetchComponents, tables, dataSource); } - @Override - List getExports(DataSource dataSource) { - PastCasesResult result = getFetchResult(pastCasesFetcher, "Past cases sheets", dataSource); - if (result == null) { - return Collections.emptyList(); - } - - return Arrays.asList( - getTableExport(DEFAULT_TEMPLATE, Bundle.PastCasesPanel_notableFileTable_tabName(), result.getTaggedNotable()), - getTableExport(DEFAULT_TEMPLATE, Bundle.PastCasesPanel_sameIdsTable_tabName(), result.getSameIdsResults()) - 
); - } - @Override public void close() { ingestRunningLabel.unregister(); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/PastCasesSummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/PastCasesSummaryGetter.java new file mode 100644 index 0000000000..2a2c0b8a55 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/PastCasesSummaryGetter.java @@ -0,0 +1,71 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2019 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.PastCasesSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.PastCasesSummary.PastCasesResult; +import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Wrapper class for converting + * org.sleuthkit.autopsy.contentutils.PastCasesSummary functionality into a + * DefaultArtifactUpdateGovernor used by PastCases tab. + */ +public class PastCasesSummaryGetter implements DefaultArtifactUpdateGovernor { + + private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( + ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID(), + ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + )); + + private final PastCasesSummary pastSummary; + + public PastCasesSummaryGetter() { + pastSummary = new PastCasesSummary(); + } + + @Override + public Set getArtifactTypeIdsForRefresh() { + return Collections.unmodifiableSet(ARTIFACT_UPDATE_TYPE_IDS); + } + + /** + * Returns the past cases data to be shown in the past cases tab. + * + * @param dataSource The data source. + * + * @return The retrieved data or null if null dataSource. 
+ * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public PastCasesResult getPastCasesData(DataSource dataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException { + return pastSummary.getPastCasesData(dataSource); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/RecentFilesGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/RecentFilesGetter.java new file mode 100755 index 0000000000..5275ec2c62 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/RecentFilesGetter.java @@ -0,0 +1,116 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020-2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; +import java.util.List; +import java.util.Set; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentAttachmentDetails; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentDownloadDetails; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentFileDetails; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * Wrapper class for converting + * org.sleuthkit.autopsy.contentutils.RecentFilesSummary functionality into a + * DefaultArtifactUpdateGovernor used by Recent Files Data Summary tab. + */ +public class RecentFilesGetter implements DefaultArtifactUpdateGovernor { + + private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( + BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT.getTypeID(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD.getTypeID(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT.getTypeID(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID() + )); + + private final RecentFilesSummary recentSummary; + + /** + * Default constructor. 
+ */ + public RecentFilesGetter() { + recentSummary = new RecentFilesSummary(); + } + + @Override + public Set getArtifactTypeIdsForRefresh() { + return Collections.unmodifiableSet(ARTIFACT_UPDATE_TYPE_IDS); + } + + /** + * Return a list of the most recently opened documents based on the + * TSK_RECENT_OBJECT artifact. + * + * @param dataSource The data source to query. + * @param maxCount The maximum number of results to return, pass 0 to get + * a list of all results. + * + * @return A list RecentFileDetails representing the most recently opened + * documents or an empty list if none were found. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List getRecentlyOpenedDocuments(DataSource dataSource, int maxCount) throws SleuthkitCaseProviderException, TskCoreException { + return recentSummary.getRecentlyOpenedDocuments(dataSource, maxCount); + } + + /** + * Return a list of the most recent downloads based on the value of the the + * artifact TSK_DATETIME_ACCESSED attribute. + * + * @param dataSource Data source to query. + * @param maxCount Maximum number of results to return, passing 0 will + * return all results. + * + * @return A list of RecentFileDetails objects or empty list if none were + * found. + * + * @throws TskCoreException + * @throws SleuthkitCaseProviderException + */ + public List getRecentDownloads(DataSource dataSource, int maxCount) throws TskCoreException, SleuthkitCaseProviderException { + return recentSummary.getRecentDownloads(dataSource, maxCount); + } + + /** + * Returns a list of the most recent message attachments. + * + * @param dataSource Data source to query. + * @param maxCount Maximum number of results to return, passing 0 will + * return all results. + * + * @return A list of RecentFileDetails of the most recent attachments. 
+ * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List getRecentAttachments(DataSource dataSource, int maxCount) throws SleuthkitCaseProviderException, TskCoreException { + return recentSummary.getRecentAttachments(dataSource, maxCount); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/RecentFilesPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/RecentFilesPanel.java index f89651072c..0bc09d59ec 100755 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/RecentFilesPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/RecentFilesPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,20 +27,15 @@ import java.util.List; import java.util.Locale; import java.util.function.Function; import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.Stream; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary; import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentAttachmentDetails; import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentDownloadDetails; import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentFileDetails; -import static org.sleuthkit.autopsy.datasourcesummary.ui.BaseDataSourceSummaryPanel.getTableExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.CellModelTableCellRenderer; import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import 
org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.GuiCellModel.MenuItem; import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel; @@ -70,7 +65,7 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { private final DataFetcher> attachmentsFetcher; private final List>> docsTemplate = Arrays.asList( - new ColumnModel<>(Bundle.RecentFilePanel_col_header_path(), + new ColumnModel<>(Bundle.RecentFilesPanel_col_header_path(), (prog) -> { return new DefaultCellModel<>(prog.getPath()) .setPopupMenuRetriever(getPopupFunct(prog)); @@ -80,12 +75,12 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { 80)); private final List>> downloadsTemplate = Arrays.asList( - new ColumnModel<>(Bundle.RecentFilePanel_col_header_domain(), + new ColumnModel<>(Bundle.RecentFilesPanel_col_header_domain(), (prog) -> { return new DefaultCellModel<>(prog.getWebDomain()) .setPopupMenuRetriever(getPopupFunct(prog)); }, 100), - new ColumnModel<>(Bundle.RecentFilePanel_col_header_path(), + new ColumnModel<>(Bundle.RecentFilesPanel_col_header_path(), (prog) -> { return new DefaultCellModel<>(prog.getPath()) .setPopupMenuRetriever(getPopupFunct(prog)); @@ -95,7 +90,7 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { 80)); private final List>> attachmentsTemplate = Arrays.asList( - new ColumnModel<>(Bundle.RecentFilePanel_col_header_path(), + new ColumnModel<>(Bundle.RecentFilesPanel_col_header_path(), (prog) -> { return new DefaultCellModel<>(prog.getPath()) .setPopupMenuRetriever(getPopupFunct(prog)); @@ -103,7 +98,7 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { new ColumnModel<>(Bundle.RecentFilesPanel_col_head_date(), getDateFunct(), 80), - new 
ColumnModel<>(Bundle.RecentFilePanel_col_header_sender(), + new ColumnModel<>(Bundle.RecentFilesPanel_col_header_sender(), (prog) -> { return new DefaultCellModel<>(prog.getSender()) .setPopupMenuRetriever(getPopupFunct(prog)); @@ -114,19 +109,18 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { */ @Messages({ "RecentFilesPanel_col_head_date=Date", - "RecentFilePanel_col_header_domain=Domain", - "RecentFilePanel_col_header_path=Path", - "RecentFilePanel_col_header_sender=Sender", - "RecentFilePanel_emailParserModuleName=Email Parser" + "RecentFilesPanel_col_header_domain=Domain", + "RecentFilesPanel_col_header_path=Path", + "RecentFilesPanel_col_header_sender=Sender" }) public RecentFilesPanel() { - this(new RecentFilesSummary()); + this(new RecentFilesGetter()); } /** * Creates new form RecentFilesPanel */ - public RecentFilesPanel(RecentFilesSummary dataHandler) { + public RecentFilesPanel(RecentFilesGetter dataHandler) { super(dataHandler); docsFetcher = (dataSource) -> dataHandler.getRecentlyOpenedDocuments(dataSource, 10); downloadsFetcher = (dataSource) -> dataHandler.getRecentDownloads(dataSource, 10); @@ -137,15 +131,16 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { } /** - * Returns a function that gets the date from the RecentFileDetails object and - * converts into a DefaultCellModel to be displayed in a table. + * Returns a function that gets the date from the RecentFileDetails object + * and converts into a DefaultCellModel to be displayed in a table. * - * @return The function that determines the date cell from a RecentFileDetails object. + * @return The function that determines the date cell from a + * RecentFileDetails object. */ private Function> getDateFunct() { return (T lastAccessed) -> { Function dateParser = (dt) -> dt == null ? 
"" : DATETIME_FORMAT.format(dt); - return new DefaultCellModel<>(new Date(lastAccessed.getDateAsLong() * 1000), dateParser, DATETIME_FORMAT_STR) + return new DefaultCellModel<>(new Date(lastAccessed.getDateAsLong() * 1000), dateParser) .setPopupMenuRetriever(getPopupFunct(lastAccessed)); }; } @@ -155,9 +150,10 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { * items. * * @param record The RecentFileDetails instance. + * * @return The menu items list containing one action or navigating to the - * appropriate artifact/file and closing the data source summary dialog if - * open. + * appropriate artifact/file and closing the data source summary + * dialog if open. */ private Supplier> getPopupFunct(RecentFileDetails record) { return () -> { @@ -190,16 +186,6 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { onNewDataSource(dataFetchComponents, tablePanelList, dataSource); } - @Override - List getExports(DataSource dataSource) { - return Stream.of( - getTableExport(docsFetcher, docsTemplate, Bundle.RecentFilesPanel_docsTable_tabName(), dataSource), - getTableExport(downloadsFetcher, downloadsTemplate, Bundle.RecentFilesPanel_downloadsTable_tabName(), dataSource), - getTableExport(attachmentsFetcher, attachmentsTemplate, Bundle.RecentFilesPanel_attachmentsTable_tabName(), dataSource)) - .filter(sheet -> sheet != null) - .collect(Collectors.toList()); - } - @Override public void close() { ingestRunningLabel.unregister(); @@ -216,7 +202,7 @@ public final class RecentFilesPanel extends BaseDataSourceSummaryPanel { } @Messages({ - "RecentFilePanel_no_open_documents=No recently open documents found." + "RecentFilesPanel_no_open_documents=No recently open documents found." }) /** * Setup the data model and columns for the recently open table. 
diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/SizeRepresentationUtil.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/SizeRepresentationUtil.java index 74b9be06a8..65f7402b04 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/SizeRepresentationUtil.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/SizeRepresentationUtil.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,8 +19,6 @@ package org.sleuthkit.autopsy.datasourcesummary.ui; import java.text.DecimalFormat; -import java.util.Arrays; -import java.util.List; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; @@ -45,28 +43,23 @@ public final class SizeRepresentationUtil { "SizeRepresentationUtil_units_petabytes=PB" }) enum SizeUnit { - BYTES(Bundle.SizeRepresentationUtil_units_bytes(), "#", 0), - KB(Bundle.SizeRepresentationUtil_units_kilobytes(), "#,##0.00,", 1), - MB(Bundle.SizeRepresentationUtil_units_megabytes(), "#,##0.00,,", 2), - GB(Bundle.SizeRepresentationUtil_units_gigabytes(), "#,##0.00,,,", 3), - TB(Bundle.SizeRepresentationUtil_units_terabytes(), "#,##0.00,,,,", 4), - PB(Bundle.SizeRepresentationUtil_units_petabytes(), "#,##0.00,,,,,", 5); + BYTES(Bundle.SizeRepresentationUtil_units_bytes(), 0), + KB(Bundle.SizeRepresentationUtil_units_kilobytes(), 1), + MB(Bundle.SizeRepresentationUtil_units_megabytes(), 2), + GB(Bundle.SizeRepresentationUtil_units_gigabytes(), 3), + TB(Bundle.SizeRepresentationUtil_units_terabytes(), 4), + PB(Bundle.SizeRepresentationUtil_units_petabytes(), 5); private final String suffix; - private final String excelFormatString; private final long divisor; /** * Main constructor. * @param suffix The string suffix to use for size unit. 
- * @param excelFormatString The excel format string to use for this size unit. * @param power The power of 1000 of bytes for this size unit. */ - SizeUnit(String suffix, String excelFormatString, int power) { + SizeUnit(String suffix, int power) { this.suffix = suffix; - - // based on https://www.mrexcel.com/board/threads/how-do-i-format-cells-to-show-gb-mb-kb.140135/ - this.excelFormatString = String.format("%s \"%s\"", excelFormatString, suffix); this.divisor = (long) Math.pow(SIZE_CONVERSION_CONSTANT, power); } @@ -77,13 +70,6 @@ public final class SizeRepresentationUtil { return suffix; } - /** - * @return The excel format string to use for this size unit. - */ - public String getExcelFormatString() { - return excelFormatString; - } - /** * @return The divisor to convert from bytes to this unit. */ @@ -114,8 +100,7 @@ public final class SizeRepresentationUtil { return SizeUnit.values()[0]; } - for (int unitsIndex = 0; unitsIndex < SizeUnit.values().length; unitsIndex++) { - SizeUnit unit = SizeUnit.values()[unitsIndex]; + for (SizeUnit unit : SizeUnit.values()) { long result = size / unit.getDivisor(); if (result < SIZE_CONVERSION_CONSTANT) { return unit; @@ -126,14 +111,14 @@ public final class SizeRepresentationUtil { } /** - * Get a long size in bytes as a string formated to be read by users. + * Get a long size in bytes as a string formatted to be read by users. * * @param size Long value representing a size in byte.s * @param format The means of formatting the number. * @param showFullSize Optionally show the number of bytes in the * datasource. * - * @return Return a string formated with a user friendly version of the size + * @return Return a string formatted with a user friendly version of the size * as a string, returns empty String when provided empty size. 
*/ static String getSizeString(Long size, DecimalFormat format, boolean showFullSize) { @@ -168,12 +153,7 @@ public final class SizeRepresentationUtil { if (bytes == null) { return new DefaultCellModel<>(""); } else { - SizeUnit unit = SizeRepresentationUtil.getSizeUnit(bytes); - if (unit == null) { - unit = SizeUnit.BYTES; - } - - return new DefaultCellModel(bytes, SizeRepresentationUtil::getSizeString, unit.getExcelFormatString()); + return new DefaultCellModel<>(bytes, SizeRepresentationUtil::getSizeString); } } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TimelinePanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TimelinePanel.java index 94af4b7545..4b16aef7a3 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TimelinePanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TimelinePanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,39 +20,29 @@ package org.sleuthkit.autopsy.datasourcesummary.ui; import java.awt.Color; import java.text.DateFormat; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.Date; import java.util.List; -import java.util.Locale; import java.util.logging.Level; import org.apache.commons.collections.CollectionUtils; import org.joda.time.DateTime; import org.joda.time.Interval; import org.openide.util.NbBundle.Messages; import org.openide.util.actions.CallableSystemAction; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineDataSourceUtils; import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary; import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary.DailyActivityAmount; import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary.TimelineSummaryData; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartPanel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartPanel.OrderedKey; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries.OrderedKey; import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries.BarChartItem; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import 
org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.KeyValueItemExportable; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.TitledExportable; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries; import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.LoadableComponent; import org.sleuthkit.autopsy.datasourcesummary.uiutils.LoadableLabel; @@ -78,20 +68,10 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { private static final long serialVersionUID = 1L; private static final String EARLIEST_LATEST_FORMAT_STR = "MMM d, yyyy"; - private static final DateFormat EARLIEST_LATEST_FORMAT = getUtcFormat(EARLIEST_LATEST_FORMAT_STR); - private static final DateFormat CHART_FORMAT = getUtcFormat("MMM d, yyyy"); + private static final DateFormat EARLIEST_LATEST_FORMAT = TimelineSummary.getUtcFormat(EARLIEST_LATEST_FORMAT_STR); + private static final DateFormat CHART_FORMAT = TimelineSummary.getUtcFormat("MMM d, yyyy"); private static final int MOST_RECENT_DAYS_COUNT = 30; - /** - * Creates a DateFormat formatter that uses UTC for time zone. - * - * @param formatString The date format string. - * @return The data format. 
- */ - private static DateFormat getUtcFormat(String formatString) { - return new SimpleDateFormat(formatString, Locale.getDefault()); - } - // components displayed in the tab private final IngestRunningLabel ingestRunningLabel = new IngestRunningLabel(); private final LoadableLabel earliestLabel = new LoadableLabel(Bundle.TimelinePanel_earliestLabel_title()); @@ -108,13 +88,13 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { private final List> dataFetchComponents; public TimelinePanel() { - this(new TimelineSummary()); + this(new TimelineSummaryGetter()); } /** * Creates new form PastCasesPanel */ - public TimelinePanel(TimelineSummary timelineData) { + public TimelinePanel(TimelineSummaryGetter timelineData) { super(timelineData); dataFetcher = (dataSource) -> timelineData.getData(dataSource, MOST_RECENT_DAYS_COUNT); @@ -126,29 +106,18 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { initComponents(); } - /** - * Formats a date using a DateFormat. In the event that the date is null, - * returns a null string. - * - * @param date The date to format. - * @param formatter The DateFormat to use to format the date. - * @return The formatted string generated from the formatter or null if the - * date is null. - */ - private static String formatDate(Date date, DateFormat formatter) { - return date == null ? null : formatter.format(date); - } - private static final Color FILE_EVT_COLOR = new Color(228, 22, 28); private static final Color ARTIFACT_EVT_COLOR = new Color(21, 227, 100); /** - * Converts DailyActivityAmount data retrieved from TimelineSummary into - * data to be displayed as a bar chart. + * Converts DailyActivityAmount data retrieved from TimelineSummaryGetter + * into data to be displayed as a bar chart. * - * @param recentDaysActivity The data retrieved from TimelineSummary. + * @param recentDaysActivity The data retrieved from + * TimelineSummaryGetter. * @param showIntermediateDates If true, shows all dates. 
If false, shows - * only first and last date. + * only first and last date. + * * @return The data to be displayed in the BarChart. */ private List parseChartData(List recentDaysActivity, boolean showIntermediateDates) { @@ -167,7 +136,7 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { long fileAmt = curItem.getFileActivityCount(); long artifactAmt = curItem.getArtifactActivityCount() * 100; String formattedDate = (showIntermediateDates || i == 0 || i == recentDaysActivity.size() - 1) - ? formatDate(curItem.getDay(), CHART_FORMAT) : ""; + ? TimelineSummary.formatDate(curItem.getDay(), CHART_FORMAT) : ""; OrderedKey thisKey = new OrderedKey(formattedDate, i); fileEvtCounts.add(new BarChartItem(thisKey, fileAmt)); @@ -191,8 +160,8 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { * @param result The result to be displayed on this tab. */ private void handleResult(DataFetchResult result) { - earliestLabel.showDataFetchResult(DataFetchResult.getSubResult(result, r -> formatDate(r.getMinDate(), EARLIEST_LATEST_FORMAT))); - latestLabel.showDataFetchResult(DataFetchResult.getSubResult(result, r -> formatDate(r.getMaxDate(), EARLIEST_LATEST_FORMAT))); + earliestLabel.showDataFetchResult(DataFetchResult.getSubResult(result, r -> TimelineSummary.formatDate(r.getMinDate(), EARLIEST_LATEST_FORMAT))); + latestLabel.showDataFetchResult(DataFetchResult.getSubResult(result, r -> TimelineSummary.formatDate(r.getMaxDate(), EARLIEST_LATEST_FORMAT))); last30DaysChart.showDataFetchResult(DataFetchResult.getSubResult(result, r -> parseChartData(r.getMostRecentDaysActivity(), false))); if (result != null @@ -242,8 +211,8 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { * Action that occurs when 'View in Timeline' button is pressed. * * @param dataSource The data source to filter to. - * @param minDate The min date for the zoom of the window. - * @param maxDate The max date for the zoom of the window. 
+ * @param minDate The min date for the zoom of the window. + * @param maxDate The max date for the zoom of the window. */ private void openFilteredChart(DataSource dataSource, Date minDate, Date maxDate) { OpenTimelineAction openTimelineAction = CallableSystemAction.get(OpenTimelineAction.class); @@ -266,7 +235,7 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { if (minDate != null && maxDate != null) { timeSpan = new Interval(new DateTime(minDate), new DateTime(maxDate)); } - } catch (NoCurrentCaseException | TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.WARNING, "Unable to view time range in Timeline view", ex); } @@ -293,43 +262,6 @@ public class TimelinePanel extends BaseDataSourceSummaryPanel { super.close(); } - /** - * Create a default cell model to be use with excel export in the earliest / - * latest date format. - * - * @param date The date. - * @return The cell model. - */ - private static DefaultCellModel getEarliestLatestCell(Date date) { - return new DefaultCellModel<>(date, (dt) -> dt == null ? 
"" : EARLIEST_LATEST_FORMAT.format(dt), EARLIEST_LATEST_FORMAT_STR); - } - - @Messages({ - "TimelinePanel_getExports_sheetName=Timeline", - "TimelinePanel_getExports_activityRange=Activity Range", - "TimelinePanel_getExports_earliest=Earliest:", - "TimelinePanel_getExports_latest=Latest:", - "TimelinePanel_getExports_dateColumnHeader=Date", - "TimelinePanel_getExports_chartName=Last 30 Days",}) - @Override - List getExports(DataSource dataSource) { - TimelineSummaryData summaryData = getFetchResult(dataFetcher, "Timeline", dataSource); - if (summaryData == null) { - return Collections.emptyList(); - } - - return Arrays.asList( - new ExcelSpecialFormatExport(Bundle.TimelinePanel_getExports_sheetName(), - Arrays.asList( - new TitledExportable(Bundle.TimelinePanel_getExports_activityRange(), Collections.emptyList()), - new KeyValueItemExportable(Bundle.TimelinePanel_getExports_earliest(), getEarliestLatestCell(summaryData.getMinDate())), - new KeyValueItemExportable(Bundle.TimelinePanel_getExports_latest(), getEarliestLatestCell(summaryData.getMaxDate())), - new BarChartExport(Bundle.TimelinePanel_getExports_dateColumnHeader(), - "#,###", - Bundle.TimelinePanel_getExports_chartName(), - parseChartData(summaryData.getMostRecentDaysActivity(), true))))); - } - /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TimelineSummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TimelineSummaryGetter.java new file mode 100644 index 0000000000..81be37577f --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TimelineSummaryGetter.java @@ -0,0 +1,88 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultUpdateGovernor; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary.TimelineSummaryData; + +/** + * Provides data source summary information pertaining to Timeline data. + */ +public class TimelineSummaryGetter implements DefaultUpdateGovernor { + + private static final Set INGEST_JOB_EVENTS = new HashSet<>( + Arrays.asList(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED)); + + private final TimelineSummary timelineSummary; + + /** + * Default constructor. 
+ */ + public TimelineSummaryGetter() { + timelineSummary = new TimelineSummary(); + } + + @Override + public boolean isRefreshRequired(ModuleContentEvent evt) { + return true; + } + + @Override + public boolean isRefreshRequired(AbstractFile file) { + return true; + } + + @Override + public boolean isRefreshRequired(IngestManager.IngestJobEvent evt) { + return (evt != null && INGEST_JOB_EVENTS.contains(evt)); + } + + @Override + public Set getIngestJobEventUpdates() { + return Collections.unmodifiableSet(INGEST_JOB_EVENTS); + } + + /** + * Retrieves timeline summary data. + * + * @param dataSource The data source for which timeline data will be + * retrieved. + * @param recentDaysNum The maximum number of most recent days' activity to + * include. + * + * @return The retrieved data. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public TimelineSummaryData getData(DataSource dataSource, int recentDaysNum) throws SleuthkitCaseProviderException, TskCoreException { + return timelineSummary.getTimelineSummaryData(dataSource, recentDaysNum); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TypesPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TypesPanel.java index e11e638459..f1421665c5 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TypesPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TypesPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -23,31 +23,23 @@ import java.sql.SQLException; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.coreutils.FileTypeUtils.FileTypeCategory; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.TypesSummary; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.MimeTypeSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataSourceInfoUtilities; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TypesSummary.FileTypeCategoryData; import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchResult.ResultType; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.KeyValueItemExportable; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.LoadableComponent; import 
org.sleuthkit.autopsy.datasourcesummary.uiutils.LoadableLabel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.PieChartExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.PieChartPanel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.PieChartItem; @@ -95,7 +87,7 @@ class TypesPanel extends BaseDataSourceSummaryPanel { * @param usefulContent True if this is useful content; false if there * is 0 mime type information. */ - public TypesPieChartData(List pieSlices, boolean usefulContent) { + TypesPieChartData(List pieSlices, boolean usefulContent) { this.pieSlices = pieSlices; this.usefulContent = usefulContent; } @@ -103,78 +95,20 @@ class TypesPanel extends BaseDataSourceSummaryPanel { /** * @return The pie chart data. */ - public List getPieSlices() { + List getPieSlices() { return pieSlices; } /** * @return Whether or not the data is usefulContent. */ - public boolean isUsefulContent() { + boolean isUsefulContent() { return usefulContent; } } - /** - * Information concerning a particular category in the file types pie chart. - */ - private static class TypesPieCategory { - - private final String label; - private final Set mimeTypes; - private final Color color; - - /** - * Main constructor. - * - * @param label The label for this slice. - * @param mimeTypes The mime types associated with this slice. - * @param color The color associated with this slice. - */ - TypesPieCategory(String label, Set mimeTypes, Color color) { - this.label = label; - this.mimeTypes = mimeTypes; - this.color = color; - } - - /** - * Constructor that accepts FileTypeCategory. - * - * @param label The label for this slice. - * @param mimeTypes The mime types associated with this slice. - * @param color The color associated with this slice. - */ - TypesPieCategory(String label, FileTypeCategory fileCategory, Color color) { - this(label, fileCategory.getMediaTypes(), color); - } - - /** - * @return The label for this category. 
- */ - String getLabel() { - return label; - } - - /** - * @return The mime types associated with this category. - */ - Set getMimeTypes() { - return mimeTypes; - } - - /** - * @return The color associated with this category. - */ - Color getColor() { - return color; - } - } - private static final long serialVersionUID = 1L; private static final DecimalFormat INTEGER_SIZE_FORMAT = new DecimalFormat("#"); - private static final String COMMA_FORMAT_STR = "#,###"; - - private static final DecimalFormat COMMA_FORMATTER = new DecimalFormat(COMMA_FORMAT_STR); private static final Color IMAGES_COLOR = new Color(156, 39, 176); private static final Color VIDEOS_COLOR = Color.YELLOW; @@ -186,13 +120,13 @@ class TypesPanel extends BaseDataSourceSummaryPanel { private static final Color NOT_ANALYZED_COLOR = Color.WHITE; // All file type categories. - private static final List FILE_MIME_TYPE_CATEGORIES = Arrays.asList( - new TypesPieCategory(Bundle.TypesPanel_fileMimeTypesChart_images_title(), FileTypeCategory.IMAGE.getMediaTypes(), IMAGES_COLOR), - new TypesPieCategory(Bundle.TypesPanel_fileMimeTypesChart_videos_title(), FileTypeCategory.VIDEO.getMediaTypes(), VIDEOS_COLOR), - new TypesPieCategory(Bundle.TypesPanel_fileMimeTypesChart_audio_title(), FileTypeCategory.AUDIO.getMediaTypes(), AUDIO_COLOR), - new TypesPieCategory(Bundle.TypesPanel_fileMimeTypesChart_documents_title(), FileTypeCategory.DOCUMENTS.getMediaTypes(), DOCUMENTS_COLOR), - new TypesPieCategory(Bundle.TypesPanel_fileMimeTypesChart_executables_title(), FileTypeCategory.EXECUTABLE.getMediaTypes(), EXECUTABLES_COLOR), - new TypesPieCategory(Bundle.TypesPanel_fileMimeTypesChart_unknown_title(), new HashSet<>(Arrays.asList("application/octet-stream")), UNKNOWN_COLOR) + private static final List FILE_MIME_TYPE_CATEGORIES = Arrays.asList( + new FileTypeCategoryData(Bundle.TypesPanel_fileMimeTypesChart_images_title(), FileTypeCategory.IMAGE.getMediaTypes(), IMAGES_COLOR), + new 
FileTypeCategoryData(Bundle.TypesPanel_fileMimeTypesChart_videos_title(), FileTypeCategory.VIDEO.getMediaTypes(), VIDEOS_COLOR), + new FileTypeCategoryData(Bundle.TypesPanel_fileMimeTypesChart_audio_title(), FileTypeCategory.AUDIO.getMediaTypes(), AUDIO_COLOR), + new FileTypeCategoryData(Bundle.TypesPanel_fileMimeTypesChart_documents_title(), FileTypeCategory.DOCUMENTS.getMediaTypes(), DOCUMENTS_COLOR), + new FileTypeCategoryData(Bundle.TypesPanel_fileMimeTypesChart_executables_title(), FileTypeCategory.EXECUTABLE.getMediaTypes(), EXECUTABLES_COLOR), + new FileTypeCategoryData(Bundle.TypesPanel_fileMimeTypesChart_unknown_title(), new HashSet<>(Arrays.asList("application/octet-stream")), UNKNOWN_COLOR) ); private final DataFetcher usageFetcher; @@ -237,8 +171,8 @@ class TypesPanel extends BaseDataSourceSummaryPanel { /** * Creates a new TypesPanel. */ - public TypesPanel() { - this(new MimeTypeSummary(), new TypesSummary(), new ContainerSummary()); + TypesPanel() { + this(new MimeTypeSummaryGetter(), new TypesSummaryGetter(), new ContainerSummaryGetter()); } @Override @@ -254,10 +188,10 @@ class TypesPanel extends BaseDataSourceSummaryPanel { * @param typeData The service for file types data. * @param containerData The service for container information. 
*/ - public TypesPanel( - MimeTypeSummary mimeTypeData, - TypesSummary typeData, - ContainerSummary containerData) { + TypesPanel( + MimeTypeSummaryGetter mimeTypeData, + TypesSummaryGetter typeData, + ContainerSummaryGetter containerData) { super(mimeTypeData, typeData, containerData); @@ -282,13 +216,13 @@ class TypesPanel extends BaseDataSourceSummaryPanel { size -> SizeRepresentationUtil.getSizeString(size, INTEGER_SIZE_FORMAT, false)))), new DataFetchWorker.DataFetchComponents<>(typesFetcher, this::showMimeTypeCategories), new DataFetchWorker.DataFetchComponents<>(allocatedFetcher, - countRes -> allocatedLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> getStringOrZero(count)))), + countRes -> allocatedLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> DataSourceInfoUtilities.getStringOrZero(count)))), new DataFetchWorker.DataFetchComponents<>(unallocatedFetcher, - countRes -> unallocatedLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> getStringOrZero(count)))), + countRes -> unallocatedLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> DataSourceInfoUtilities.getStringOrZero(count)))), new DataFetchWorker.DataFetchComponents<>(slackFetcher, - countRes -> slackLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> getStringOrZero(count)))), + countRes -> slackLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> DataSourceInfoUtilities.getStringOrZero(count)))), new DataFetchWorker.DataFetchComponents<>(directoriesFetcher, - countRes -> directoriesLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> getStringOrZero(count)))) + countRes -> directoriesLabel.showDataFetchResult(DataFetchResult.getSubResult(countRes, (count) -> DataSourceInfoUtilities.getStringOrZero(count)))) ); initComponents(); @@ -312,7 +246,7 @@ class TypesPanel extends BaseDataSourceSummaryPanel { * * @return The pie chart 
items. */ - private TypesPieChartData getMimeTypeCategoriesModel(MimeTypeSummary mimeTypeData, DataSource dataSource) + private TypesPieChartData getMimeTypeCategoriesModel(MimeTypeSummaryGetter mimeTypeData, DataSource dataSource) throws SQLException, SleuthkitCaseProviderException, TskCoreException { if (dataSource == null) { @@ -323,8 +257,8 @@ class TypesPanel extends BaseDataSourceSummaryPanel { List fileCategoryItems = new ArrayList<>(); long categoryTotalCount = 0; - for (TypesPieCategory cat : FILE_MIME_TYPE_CATEGORIES) { - long thisValue = getLongOrZero(mimeTypeData.getCountOfFilesForMimeTypes(dataSource, cat.getMimeTypes())); + for (FileTypeCategoryData cat : FILE_MIME_TYPE_CATEGORIES) { + long thisValue = DataSourceInfoUtilities.getLongOrZero(mimeTypeData.getCountOfFilesForMimeTypes(dataSource, cat.getMimeTypes())); categoryTotalCount += thisValue; fileCategoryItems.add(new PieChartItem( @@ -334,10 +268,10 @@ class TypesPanel extends BaseDataSourceSummaryPanel { } // get a count of all files with no mime type - long noMimeTypeCount = getLongOrZero(mimeTypeData.getCountOfFilesWithNoMimeType(dataSource)); + long noMimeTypeCount = DataSourceInfoUtilities.getLongOrZero(mimeTypeData.getCountOfFilesWithNoMimeType(dataSource)); // get a count of all regular files - long allRegularFiles = getLongOrZero(mimeTypeData.getCountOfAllRegularFiles(dataSource)); + long allRegularFiles = DataSourceInfoUtilities.getLongOrZero(mimeTypeData.getCountOfAllRegularFiles(dataSource)); // create entry for mime types in other category long otherCount = allRegularFiles - (categoryTotalCount + noMimeTypeCount); @@ -390,89 +324,6 @@ class TypesPanel extends BaseDataSourceSummaryPanel { } } - /** - * Returns the long value or zero if longVal is null. - * - * @param longVal The long value. - * - * @return The long value or 0 if provided value is null. - */ - private static long getLongOrZero(Long longVal) { - return longVal == null ? 
0 : longVal; - } - - /** - * Returns string value of long with comma separators. If null returns a - * string of '0'. - * - * @param longVal The long value. - * - * @return The string value of the long. - */ - private static String getStringOrZero(Long longVal) { - return longVal == null ? "0" : COMMA_FORMATTER.format(longVal); - } - - /** - * Returns a key value pair to be exported in a sheet. - * - * @param fetcher The means of fetching the data. - * @param key The key to use. - * @param dataSource The data source containing the data. - * @return The key value pair to be exported. - */ - private static KeyValueItemExportable getStrExportable(DataFetcher fetcher, String key, DataSource dataSource) { - String result = getFetchResult(fetcher, "Types", dataSource); - return (result == null) ? null : new KeyValueItemExportable(key, new DefaultCellModel<>(result)); - } - - /** - * Returns a key value pair to be exported in a sheet formatting the long - * with commas separated by orders of 1000. - * - * @param fetcher The means of fetching the data. - * @param key The string key for this key value pair. - * @param dataSource The data source. - * @return The key value pair. - */ - private static KeyValueItemExportable getCountExportable(DataFetcher fetcher, String key, DataSource dataSource) { - Long count = getFetchResult(fetcher, "Types", dataSource); - return (count == null) ? null : new KeyValueItemExportable(key, - new DefaultCellModel(count, COMMA_FORMATTER::format, COMMA_FORMAT_STR)); - } - - @Override - List getExports(DataSource dataSource) { - if (dataSource == null) { - return Collections.emptyList(); - } - - // Retrieve data to create the types pie chart - TypesPieChartData typesData = TypesPanel.getFetchResult(typesFetcher, "Types", dataSource); - PieChartExport typesChart = (typesData == null || !typesData.isUsefulContent()) ? 
null : - new PieChartExport( - Bundle.TypesPanel_fileMimeTypesChart_title(), - Bundle.TypesPanel_fileMimeTypesChart_valueLabel(), - "#,###", - Bundle.TypesPanel_fileMimeTypesChart_title(), - typesData.getPieSlices()); - - return Arrays.asList(new ExcelSpecialFormatExport(Bundle.TypesPanel_excelTabName(), - Stream.of( - getStrExportable(usageFetcher, Bundle.TypesPanel_usageLabel_title(), dataSource), - getStrExportable(osFetcher, Bundle.TypesPanel_osLabel_title(), dataSource), - new KeyValueItemExportable(Bundle.TypesPanel_sizeLabel_title(), - SizeRepresentationUtil.getBytesCell(getFetchResult(sizeFetcher, "Types", dataSource))), - typesChart, - getCountExportable(allocatedFetcher, Bundle.TypesPanel_filesByCategoryTable_allocatedRow_title(), dataSource), - getCountExportable(unallocatedFetcher, Bundle.TypesPanel_filesByCategoryTable_unallocatedRow_title(), dataSource), - getCountExportable(slackFetcher, Bundle.TypesPanel_filesByCategoryTable_slackRow_title(), dataSource), - getCountExportable(directoriesFetcher, Bundle.TypesPanel_filesByCategoryTable_directoryRow_title(), dataSource)) - .filter(sheet -> sheet != null) - .collect(Collectors.toList()) - )); - } - /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TypesSummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TypesSummaryGetter.java new file mode 100644 index 0000000000..fee3f178cc --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/TypesSummaryGetter.java @@ -0,0 +1,154 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2019 - 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultUpdateGovernor; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TypesSummary; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Wrapper class for converting org.sleuthkit.autopsy.contentutils.TypesSummary + * functionality into a DefaultArtifactUpdateGovernor used by + * DataSourceSummaryCountsPanel. + */ +public class TypesSummaryGetter implements DefaultUpdateGovernor { + + private static final Set INGEST_JOB_EVENTS = new HashSet<>( + Arrays.asList(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED)); + + private final TypesSummary typesSummary; + + /** + * Main constructor. 
+ */ + public TypesSummaryGetter() { + typesSummary = new TypesSummary(); + } + + @Override + public boolean isRefreshRequired(ModuleContentEvent evt) { + return true; + } + + @Override + public boolean isRefreshRequired(AbstractFile file) { + return true; + } + + @Override + public boolean isRefreshRequired(IngestManager.IngestJobEvent evt) { + return (evt != null && INGEST_JOB_EVENTS.contains(evt)); + } + + @Override + public Set getIngestJobEventUpdates() { + return Collections.unmodifiableSet(INGEST_JOB_EVENTS); + } + + /** + * Get count of regular files (not directories) in a data source. + * + * @param currentDataSource The data source. + * + * @return The count. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfFiles(DataSource currentDataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return typesSummary.getCountOfFiles(currentDataSource); + } + + /** + * Get count of allocated files in a data source. + * + * @param currentDataSource The data source. + * + * @return The count. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfAllocatedFiles(DataSource currentDataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return typesSummary.getCountOfAllocatedFiles(currentDataSource); + } + + /** + * Get count of unallocated files in a data source. + * + * @param currentDataSource The data source. + * + * @return The count. 
+ * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfUnallocatedFiles(DataSource currentDataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return typesSummary.getCountOfUnallocatedFiles(currentDataSource); + } + + /** + * Get count of directories in a data source. + * + * @param currentDataSource The data source. + * + * @return The count. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfDirectories(DataSource currentDataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return typesSummary.getCountOfDirectories(currentDataSource); + } + + /** + * Get count of slack files in a data source. + * + * @param currentDataSource The data source. + * + * @return The count. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + * @throws SQLException + */ + public Long getCountOfSlackFiles(DataSource currentDataSource) + throws SleuthkitCaseProvider.SleuthkitCaseProviderException, TskCoreException, SQLException { + return typesSummary.getCountOfSlackFiles(currentDataSource); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/UserActivityPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/UserActivityPanel.java index 2cde769403..46f5deca36 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/UserActivityPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/UserActivityPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -26,23 +26,19 @@ import java.util.Date; import java.util.List; import java.util.Locale; import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; import org.apache.commons.lang.StringUtils; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary; import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.LastAccessedArtifact; import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopAccountResult; import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDeviceAttachedResult; -import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopWebSearchResult; import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDomainsResult; import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopProgramsResult; -import static org.sleuthkit.autopsy.datasourcesummary.ui.BaseDataSourceSummaryPanel.getTableExport; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopWebSearchResult; import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetchWorker.DataFetchComponents; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport; import org.sleuthkit.autopsy.datasourcesummary.uiutils.GuiCellModel.MenuItem; import org.sleuthkit.autopsy.datasourcesummary.uiutils.IngestRunningLabel; import org.sleuthkit.autopsy.datasourcesummary.uiutils.JTablePanel; @@ -265,22 +261,22 @@ public class UserActivityPanel extends BaseDataSourceSummaryPanel { 
private final IngestRunningLabel ingestRunningLabel = new IngestRunningLabel(); private final List> dataFetchComponents; - private final UserActivitySummary userActivityData; + private final UserActivitySummaryGetter userActivityData; /** * Creates a new UserActivityPanel. */ public UserActivityPanel() { - this(new UserActivitySummary()); + this(new UserActivitySummaryGetter()); } /** * Creates a new UserActivityPanel. * * @param userActivityData Class from which to obtain remaining user - * activity data. + * activity data. */ - public UserActivityPanel(UserActivitySummary userActivityData) { + public UserActivityPanel(UserActivitySummaryGetter userActivityData) { super(userActivityData); this.userActivityData = userActivityData; @@ -320,7 +316,7 @@ public class UserActivityPanel extends BaseDataSourceSummaryPanel { private Function> getDateFunct() { return (T lastAccessed) -> { Function dateParser = (dt) -> dt == null ? "" : DATETIME_FORMAT.format(dt); - return new DefaultCellModel<>(lastAccessed.getLastAccessed(), dateParser, DATETIME_FORMAT_STR) + return new DefaultCellModel<>(lastAccessed.getLastAccessed(), dateParser) .setPopupMenu(getPopup(lastAccessed)); }; } @@ -332,7 +328,8 @@ public class UserActivityPanel extends BaseDataSourceSummaryPanel { * @param record The LastAccessedArtifact instance. * * @return The menu items list containing one action or navigating to the - * appropriate artifact and closing the data source summary dialog if open. + * appropriate artifact and closing the data source summary dialog + * if open. */ private List getPopup(LastAccessedArtifact record) { return record == null ? null : Arrays.asList(getArtifactNavigateItem(record.getArtifact())); @@ -341,13 +338,13 @@ public class UserActivityPanel extends BaseDataSourceSummaryPanel { /** * Queries DataSourceTopProgramsSummary instance for short folder name. * - * @param path The path for the application. + * @param path The path for the application. 
* @param appName The application name. * * @return The underlying short folder name if one exists. */ - private String getShortFolderName(String path, String appName) { - return this.userActivityData.getShortFolderName(path, appName); + private static String getShortFolderName(String path, String appName) { + return UserActivitySummary.getShortFolderName(path, appName); } @Override @@ -366,18 +363,6 @@ public class UserActivityPanel extends BaseDataSourceSummaryPanel { super.close(); } - @Override - List getExports(DataSource dataSource) { - return Stream.of( - getTableExport(topProgramsFetcher, topProgramsTemplate, Bundle.UserActivityPanel_TopProgramsTableModel_tabName(), dataSource), - getTableExport(topDomainsFetcher, topDomainsTemplate, Bundle.UserActivityPanel_TopDomainsTableModel_tabName(), dataSource), - getTableExport(topWebSearchesFetcher, topWebSearchesTemplate, Bundle.UserActivityPanel_TopWebSearchTableModel_tabName(), dataSource), - getTableExport(topDevicesAttachedFetcher, topDevicesTemplate, Bundle.UserActivityPanel_TopDeviceAttachedTableModel_tabName(), dataSource), - getTableExport(topAccountsFetcher, topAccountsTemplate, Bundle.UserActivityPanel_TopAccountTableModel_tabName(), dataSource)) - .filter(sheet -> sheet != null) - .collect(Collectors.toList()); - } - /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/UserActivitySummaryGetter.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/UserActivitySummaryGetter.java new file mode 100644 index 0000000000..e9f9dd30b0 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/UserActivitySummaryGetter.java @@ -0,0 +1,162 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020-2021 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datasourcesummary.ui; + +import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultArtifactUpdateGovernor; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopAccountResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDeviceAttachedResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDomainsResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopProgramsResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopWebSearchResult; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; + +/** + * Wrapper class for converting + * org.sleuthkit.autopsy.contentutils.UserActivitySummary functionality into a + * DefaultArtifactUpdateGovernor used by UserActivityPanel tab. 
+ */ +public class UserActivitySummaryGetter implements DefaultArtifactUpdateGovernor { + + private static final Set ARTIFACT_UPDATE_TYPE_IDS = new HashSet<>(Arrays.asList( + ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY.getTypeID(), + ARTIFACT_TYPE.TSK_MESSAGE.getTypeID(), + ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID(), + ARTIFACT_TYPE.TSK_CALLLOG.getTypeID(), + ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID(), + ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID(), + ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID() + )); + + private final UserActivitySummary userActivity; + + public UserActivitySummaryGetter() { + userActivity = new UserActivitySummary(); + } + + @Override + public Set getArtifactTypeIdsForRefresh() { + return Collections.unmodifiableSet(ARTIFACT_UPDATE_TYPE_IDS); + } + + /** + * Gets a list of recent domains based on the datasource. + * + * @param dataSource The datasource to query for recent domains. + * @param count The max count of items to return. + * + * @return The list of items retrieved from the database. + * + * @throws InterruptedException + */ + public List getRecentDomains(DataSource dataSource, int count) throws TskCoreException, SleuthkitCaseProviderException { + return userActivity.getRecentDomains(dataSource, count); + } + + /** + * Retrieves most recent web searches by most recent date grouped by search + * term. + * + * @param dataSource The data source. + * @param count The maximum number of records to be shown (must be > + * 0). + * + * @return The list of most recent web searches where most recent search + * appears first. + * + * @throws + * org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List getMostRecentWebSearches(DataSource dataSource, int count) throws SleuthkitCaseProviderException, TskCoreException { + return userActivity.getMostRecentWebSearches(dataSource, count); + } + + /** + * Retrieves most recent devices used by most recent date attached. 
+ * + * @param dataSource The data source. + * @param count The maximum number of records to be shown (must be > + * 0). + * + * @return The list of most recent devices attached where most recent device + * attached appears first. + * + * @throws + * org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List getRecentDevices(DataSource dataSource, int count) throws SleuthkitCaseProviderException, TskCoreException { + return userActivity.getRecentDevices(dataSource, count); + } + + /** + * Retrieves most recent account used by most recent date for a message + * sent. + * + * @param dataSource The data source. + * @param count The maximum number of records to be shown (must be > + * 0). + * + * @return The list of most recent accounts used where the most recent + * account by last message sent occurs first. + * + * @throws + * org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider.SleuthkitCaseProviderException + * @throws TskCoreException + */ + @Messages({ + "DataSourceUserActivitySummary_getRecentAccounts_emailMessage=Email Message", + "DataSourceUserActivitySummary_getRecentAccounts_calllogMessage=Call Log",}) + public List getRecentAccounts(DataSource dataSource, int count) throws SleuthkitCaseProviderException, TskCoreException { + return userActivity.getRecentAccounts(dataSource, count); + } + + /** + * Retrieves the top programs results for the given data source limited to + * the count provided as a parameter. The highest run times are at the top + * of the list. If that information isn't available the last run date is + * used. If both, the last run date and the number of run times are + * unavailable, the programs will be sorted alphabetically, the count will + * be ignored and all items will be returned. + * + * @param dataSource The datasource. If the datasource is null, an empty + * list will be returned. 
+ * @param count The number of results to return. This value must be > 0 + * or an IllegalArgumentException will be thrown. + * + * @return The sorted list and limited to the count if last run or run count + * information is available on any item. + * + * @throws SleuthkitCaseProviderException + * @throws TskCoreException + */ + public List getTopPrograms(DataSource dataSource, int count) throws SleuthkitCaseProviderException, TskCoreException { + return userActivity.getTopPrograms(dataSource, count); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartPanel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartPanel.java index 21f3bca572..eb363a8a47 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartPanel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartPanel.java @@ -39,84 +39,6 @@ import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries.BarChartIt */ public class BarChartPanel extends AbstractLoadableComponent> { - /** - * JFreeChart bar charts don't preserve the order of bars provided to the - * chart, but instead uses the comparable nature to order items. This - * provides order using a provided index as well as the value for the axis. - */ - public static class OrderedKey implements Comparable { - - private final Object keyValue; - private final int keyIndex; - - /** - * Main constructor. - * - * @param keyValue The value for the key to be displayed in the domain - * axis. - * @param keyIndex The index at which it will be displayed. - */ - public OrderedKey(Object keyValue, int keyIndex) { - this.keyValue = keyValue; - this.keyIndex = keyIndex; - } - - /** - * @return The value for the key to be displayed in the domain axis. - */ - Object getKeyValue() { - return keyValue; - } - - /** - * @return The index at which it will be displayed. 
- */ - int getKeyIndex() { - return keyIndex; - } - - @Override - public int compareTo(OrderedKey o) { - // this will have a higher value than null. - if (o == null) { - return 1; - } - - // compare by index - return Integer.compare(this.getKeyIndex(), o.getKeyIndex()); - } - - @Override - public int hashCode() { - int hash = 3; - return hash; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final OrderedKey other = (OrderedKey) obj; - if (this.keyIndex != other.keyIndex) { - return false; - } - return true; - } - - @Override - public String toString() { - // use toString on the key. - return this.getKeyValue() == null ? null : this.getKeyValue().toString(); - } - } - private static final long serialVersionUID = 1L; private static final Font DEFAULT_FONT = new JLabel().getFont(); diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartSeries.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartSeries.java index c1626f34b2..8709fc8ae1 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartSeries.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartSeries.java @@ -98,4 +98,82 @@ public class BarChartSeries { return key; } + /** + * JFreeChart bar charts don't preserve the order of bars provided to the + * chart, but instead uses the comparable nature to order items. This + * provides order using a provided index as well as the value for the axis. + */ + public static class OrderedKey implements Comparable { + + private final Object keyValue; + private final int keyIndex; + + /** + * Main constructor. + * + * @param keyValue The value for the key to be displayed in the domain + * axis. + * @param keyIndex The index at which it will be displayed. 
+ */ + public OrderedKey(Object keyValue, int keyIndex) { + this.keyValue = keyValue; + this.keyIndex = keyIndex; + } + + /** + * @return The value for the key to be displayed in the domain axis. + */ + Object getKeyValue() { + return keyValue; + } + + /** + * @return The index at which it will be displayed. + */ + int getKeyIndex() { + return keyIndex; + } + + @Override + public int compareTo(OrderedKey o) { + // this will have a higher value than null. + if (o == null) { + return 1; + } + + // compare by index + return Integer.compare(this.getKeyIndex(), o.getKeyIndex()); + } + + @Override + public int hashCode() { + int hash = 3; + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final OrderedKey other = (OrderedKey) obj; + if (this.keyIndex != other.keyIndex) { + return false; + } + return true; + } + + @Override + public String toString() { + // use toString on the key. + return this.getKeyValue() == null ? null : this.getKeyValue().toString(); + } + } + } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/Bundle.properties-MERGED index ad10f70c22..c06bc6850a 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/Bundle.properties-MERGED @@ -1,7 +1,5 @@ AbstractLoadableComponent_errorMessage_defaultText=There was an error loading results. AbstractLoadableComponent_loadingMessage_defaultText=Loading results... AbstractLoadableComponent_noDataExists_defaultText=No data exists. -# {0} - sheetNumber -ExcelExport_writeExcel_noSheetName=Sheet {0} IngestRunningLabel_defaultMessage=Ingest is currently running. 
PieChartPanel_noDataLabel=No Data diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DataFetchWorker.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DataFetchWorker.java index 46e68a1f55..8d007c2a01 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DataFetchWorker.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DataFetchWorker.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.datasourcesummary.uiutils; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; import java.util.concurrent.ExecutionException; import java.util.function.Consumer; import javax.swing.SwingWorker; diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DefaultCellModel.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DefaultCellModel.java index 215f71469e..81c176954a 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DefaultCellModel.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/DefaultCellModel.java @@ -27,7 +27,7 @@ import java.util.function.Supplier; /** * The default cell model. */ -public class DefaultCellModel implements GuiCellModel, ExcelCellModel { +public class DefaultCellModel implements GuiCellModel { private final T data; private final String text; @@ -35,7 +35,6 @@ public class DefaultCellModel implements GuiCellModel, ExcelCellModel { private CellModel.HorizontalAlign horizontalAlignment; private List popupMenu; private Supplier> menuItemSupplier; - private final String excelFormatString; /** * Main constructor. @@ -43,18 +42,7 @@ public class DefaultCellModel implements GuiCellModel, ExcelCellModel { * @param data The data to be displayed in the cell. */ public DefaultCellModel(T data) { - this(data, null, null); - } - - /** - * Constructor. - * - * @param data The data to be displayed in the cell. 
- * @param stringConverter The means of converting that data to a string or - * null to use .toString method on object. - */ - public DefaultCellModel(T data, Function stringConverter) { - this(data, stringConverter, null); + this(data, null); } /** @@ -63,15 +51,9 @@ public class DefaultCellModel implements GuiCellModel, ExcelCellModel { * @param data The data to be displayed in the cell. * @param stringConverter The means of converting that data to a string or * null to use .toString method on object. - * @param excelFormatString The apache poi excel format string to use with - * the data. - * - * NOTE: Only certain data types can be exported. See - * ExcelTableExport.createCell() for types. */ - public DefaultCellModel(T data, Function stringConverter, String excelFormatString) { + public DefaultCellModel(T data, Function stringConverter) { this.data = data; - this.excelFormatString = excelFormatString; if (stringConverter == null) { text = this.data == null ? "" : this.data.toString(); @@ -86,11 +68,6 @@ public class DefaultCellModel implements GuiCellModel, ExcelCellModel { return this.data; } - @Override - public String getExcelFormatString() { - return this.excelFormatString; - } - @Override public String getText() { return text; diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchAction.java index 9ec86b7900..a3f64f1e13 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchAction.java @@ -1,15 +1,15 @@ /* * Autopsy Forensic Browser - * - * Copyright 2011 Basis Technology Corp. + * + * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,14 +28,26 @@ import org.openide.util.Lookup; */ public class FileSearchAction extends AbstractAction { + private final Long dataSourceId; + + public FileSearchAction(String title, long dataSourceID) { + super(title); + dataSourceId = dataSourceID; + } + public FileSearchAction(String title) { super(title); + dataSourceId = null; } @Override public void actionPerformed(ActionEvent e) { FileSearchProvider searcher = Lookup.getDefault().lookup(FileSearchProvider.class); - searcher.showDialog(); + if (dataSourceId == null) { + searcher.showDialog(); + } else { + searcher.showDialog(dataSourceId); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchProvider.java b/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchProvider.java index 991126e2c5..523a4a69bd 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchProvider.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/FileSearchProvider.java @@ -1,15 +1,15 @@ /* * Autopsy Forensic Browser - * - * Copyright 2011 Basis Technology Corp. + * + * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -23,5 +23,8 @@ package org.sleuthkit.autopsy.directorytree; */ public interface FileSearchProvider { + public void showDialog(Long dataSourceID); + + @Deprecated public void showDialog(); } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java b/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java index bb2e258f2e..eed8a106f4 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -54,14 +54,18 @@ public abstract class AbstractFilter { * @param caseDb The case database * @param centralRepoDb The central repo database. Can be null if the * filter does not require it. + * @param context The SearchContext the search which is applying this + * filter is being performed from. * * @return The list of results that match this filter (and any that came * before it) * * @throws DiscoveryException + * @throws SearchCancellationException Thrown when the user has cancelled + * the search. */ public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { return new ArrayList<>(); } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java index 16e0e80f1b..c1436ff890 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2020 Basis Technology Corp. 
+ * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -78,10 +78,14 @@ public class DiscoveryAttributes { * @param caseDb The case database. * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search which is applying + * this filter is being performed from. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. */ - public void addAttributeToResults(List results, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + public void addAttributeToResults(List results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Default is to do nothing } } @@ -154,10 +158,13 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { try { - Map> domainsToCategories = getDomainsWithWebCategories(caseDb); + Map> domainsToCategories = getDomainsWithWebCategories(caseDb, context); for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Domain Category Attribute was being added."); + } if (result instanceof ResultDomain) { ResultDomain domain = (ResultDomain) result; domain.addWebCategories(domainsToCategories.get(domain.getDomain())); @@ -172,14 +179,29 @@ public class DiscoveryAttributes { * Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to * the category name attribute. Each ResultDomain is then parsed and * matched against this map of values. + * + * @param caseDb The case database. 
+ * @param context The SearchContext the search which is applying this + * filter is being performed from. + * + * @return domainToCategory - A map of the domain names to the category + * name attribute they are classified as. + * + * @throws TskCoreException + * @throws InterruptedException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. */ - private Map> getDomainsWithWebCategories(SleuthkitCase caseDb) throws TskCoreException, InterruptedException { + private Map> getDomainsWithWebCategories(SleuthkitCase caseDb, SearchContext context) throws TskCoreException, InterruptedException, SearchCancellationException { Map> domainToCategory = new HashMap<>(); for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) { if (Thread.currentThread().isInterrupted()) { throw new InterruptedException(); } + if (context.searchIsCancelled()) { + throw new SearchCancellationException("Search was cancelled while getting domains for artifact type: " + artifact.getDisplayName()); + } BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME)); BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN)); if (webCategory != null && domain != null) { @@ -206,14 +228,16 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, keyword list name) for all files in the list of files that have // keyword list hits. 
String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(), BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); - SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Keyword List Attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -278,8 +302,20 @@ public class DiscoveryAttributes { * Example: query for notable status of google.com. Result: notable With * this map, all domain instances that represent google.com can be updated * after one simple lookup. + * + * @param domainsBatch The list of ResultDomains to organize. + * @param attributeType The type of correlation attribute being organized. + * @param context The SearchContext the search which is applying this + * filter is being performed from. + * + * @return resultDomainTable - A map of the normalized domain name to the + * list of ResultDomain objects which are part of that normalized + * domain. + * + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
 */ - private static Map> organizeByValue(List domainsBatch, CorrelationAttributeInstance.Type attributeType) { + private static Map> organizeByValue(List domainsBatch, CorrelationAttributeInstance.Type attributeType, SearchContext context) throws SearchCancellationException { final Map> resultDomainTable = new HashMap<>(); for (ResultDomain domainInstance : domainsBatch) { try { @@ -288,6 +324,9 @@ public class DiscoveryAttributes { final List bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>()); bucket.add(domainInstance); resultDomainTable.put(normalizedDomain, bucket); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("Search was cancelled while organizing domains by their normalized value."); + } } catch (CorrelationAttributeNormalizationException ex) { logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain())); } @@ -322,39 +361,73 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (centralRepoDb != null) { - processFilesWithCr(results, centralRepoDb); + processFilesWithCr(results, centralRepoDb, context); } } - private void processFilesWithCr(List results, CentralRepository centralRepo) throws DiscoveryException { + /** + * Helper method to batch the domain results and check for notability. + * + * @param results The results which are being checked for previously + * being notable in the CR. + * @param centralRepo The central repository being used to check for + * notability. + * @param context The SearchContext the search which is applying + * this filter is being performed from. + * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search.
+ */ + private void processFilesWithCr(List results, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException { List domainsBatch = new ArrayList<>(); for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Previously Notable attribute was being calculated with the CR."); + } if (result.getType() == SearchData.Type.DOMAIN) { domainsBatch.add((ResultDomain) result); if (domainsBatch.size() == DOMAIN_BATCH_SIZE) { - queryPreviouslyNotable(domainsBatch, centralRepo); + queryPreviouslyNotable(domainsBatch, centralRepo, context); domainsBatch.clear(); } } } - queryPreviouslyNotable(domainsBatch, centralRepo); + queryPreviouslyNotable(domainsBatch, centralRepo, context); } - private void queryPreviouslyNotable(List domainsBatch, CentralRepository centralRepo) throws DiscoveryException { + /** + * Helper method to check a batch of domains for notability. + * + * + * @param domainsBatch The list of ResultDomains to check for + * notability. + * @param centralRepo The central repository being used to check for + * notability. + * @param context The SearchContext the search which is applying + * this filter is being performed from. + * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. 
+ */ + private void queryPreviouslyNotable(List domainsBatch, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException { if (domainsBatch.isEmpty()) { return; } try { final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); - final Map> resultDomainTable = organizeByValue(domainsBatch, attributeType); + final Map> resultDomainTable = organizeByValue(domainsBatch, attributeType, context); final String values = createCSV(resultDomainTable.keySet()); - + if (context.searchIsCancelled()) { + throw new SearchCancellationException("Search was cancelled while checking for previously notable domains."); + } final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String domainFrequencyQuery = " value AS domain_name " + "FROM " + tableName + " " @@ -421,7 +494,7 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (centralRepoDb == null) { for (Result result : results) { if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) { @@ -429,7 +502,7 @@ public class DiscoveryAttributes { } } } else { - processResultFilesForCR(results, centralRepoDb); + processResultFilesForCR(results, centralRepoDb, context); } } @@ -437,16 +510,26 @@ public class DiscoveryAttributes { * Private helper method for adding Frequency attribute when CR is * enabled. * - * @param files The list of ResultFiles to caluclate frequency - * for. - * @param centralRepoDb The central repository currently in use. + * @param results The results which are having their frequency + * checked. 
+ * @param centralRepoDb The central repository being used to check + * frequency. + * @param context The SearchContext the search which is applying + * this filter is being performed from. + * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. */ private void processResultFilesForCR(List results, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { List currentFiles = new ArrayList<>(); Set hashesToLookUp = new HashSet<>(); List domainsToQuery = new ArrayList<>(); for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Frequency attribute was being calculated with the CR."); + } // If frequency was already calculated, skip... if (result.getFrequency() == SearchData.Frequency.UNKNOWN) { if (result.getKnown() == TskData.FileKnown.KNOWN) { @@ -462,7 +545,7 @@ public class DiscoveryAttributes { } if (hashesToLookUp.size() >= BATCH_SIZE) { - computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); + computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context); hashesToLookUp.clear(); currentFiles.clear(); @@ -470,16 +553,15 @@ public class DiscoveryAttributes { } else { domainsToQuery.add((ResultDomain) result); if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) { - queryDomainFrequency(domainsToQuery, centralRepoDb); - + queryDomainFrequency(domainsToQuery, centralRepoDb, context); domainsToQuery.clear(); } } } } - queryDomainFrequency(domainsToQuery, centralRepoDb); - computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); + queryDomainFrequency(domainsToQuery, centralRepoDb, context); + computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context); } } @@ -487,17 +569,22 @@ public class DiscoveryAttributes { * Query to get the frequency of a domain. 
* * @param domainsToQuery List of domains to check the frequency of. - * @param centralRepository The central repository to query. + * @param centralRepository The central repository being used to check + * frequency. + * @param context The SearchContext the search which is applying + * this filter is being performed from. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - private static void queryDomainFrequency(List domainsToQuery, CentralRepository centralRepository) throws DiscoveryException { + private static void queryDomainFrequency(List domainsToQuery, CentralRepository centralRepository, SearchContext context) throws DiscoveryException, SearchCancellationException { if (domainsToQuery.isEmpty()) { return; } try { final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); - final Map> resultDomainTable = organizeByValue(domainsToQuery, attributeType); + final Map> resultDomainTable = organizeByValue(domainsToQuery, attributeType, context); final String values = createCSV(resultDomainTable.keySet()); final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM" @@ -508,8 +595,11 @@ public class DiscoveryAttributes { + ")) AS foo GROUP BY value"; final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable); - centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback); + centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR."); + } if (frequencyCallback.getCause() != null) { throw frequencyCallback.getCause(); } @@ -620,7 +710,7 @@ public class 
DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, hash set name) for all files in the list of files that have // hash set hits. @@ -628,6 +718,9 @@ public class DiscoveryAttributes { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); HashSetNamesCallback callback = new HashSetNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Hash Hit attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -695,7 +788,7 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, interesting item set name) for all files in the list of files that have // interesting file set hits. 
@@ -703,6 +796,9 @@ public class DiscoveryAttributes { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Interesting Item attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -808,7 +904,7 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, object type name) for all files in the list of files that have // objects detected @@ -816,6 +912,9 @@ public class DiscoveryAttributes { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID()); ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Object Detected attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -884,10 +983,13 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { try { for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while File Tag attribute was being added."); + } if (result.getType() == SearchData.Type.DOMAIN) { return; } @@ -995,14 +1097,20 @@ public class DiscoveryAttributes { } /** + * * Computes the 
CR frequency of all the given hashes and updates the list of * files. * * @param hashesToLookUp Hashes to find the frequency of. * @param currentFiles List of files to update with frequencies. * @param centralRepoDb The central repository being used. + * @param context The SearchContext the search which is applying this + * filter is being performed from. + * + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - private static void computeFrequency(Set hashesToLookUp, List currentFiles, CentralRepository centralRepoDb) { + private static void computeFrequency(Set hashesToLookUp, List currentFiles, CentralRepository centralRepoDb, SearchContext context) throws SearchCancellationException { if (hashesToLookUp.isEmpty()) { return; @@ -1022,7 +1130,9 @@ public class DiscoveryAttributes { FrequencyCallback callback = new FrequencyCallback(currentFiles); centralRepoDb.processSelectClause(selectClause, callback); - + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while file frequency was being queried with the CR."); + } } catch (CentralRepoException ex) { logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java index c201414199..ca1828d02c 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java @@ -59,6 +59,7 @@ public class DiscoveryKeyUtils { private final List filters; private final SleuthkitCase sleuthkitCase; private final CentralRepository centralRepository; + private final SearchContext context; /** * Construct a new SearchKey with all information that defines a search.
@@ -70,16 +71,20 @@ public class DiscoveryKeyUtils { * @param sortingMethod The method to sort the results by. * @param sleuthkitCase The SleuthkitCase being searched. * @param centralRepository The Central Repository being searched. + * @param context The SearchContext which reflects the search + * being performed to get results for this + * key. */ SearchKey(String userName, List filters, DiscoveryAttributes.AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod sortingMethod, - SleuthkitCase sleuthkitCase, CentralRepository centralRepository) { + SleuthkitCase sleuthkitCase, CentralRepository centralRepository, SearchContext context) { this.groupAttributeType = groupAttributeType; this.groupSortingType = groupSortingType; this.sortingMethod = sortingMethod; this.filters = filters; + this.context = context; StringBuilder searchStringBuilder = new StringBuilder(); searchStringBuilder.append(userName); @@ -93,8 +98,8 @@ public class DiscoveryKeyUtils { } /** - * Construct a SearchKey without a SleuthkitCase or CentralRepositry - * instance. + * Construct a SearchKey without a SearchContext, SleuthkitCase or + * CentralRepository instance. * * @param userName The name of the user performing the search. * @param filters The Filters being used for the search. @@ -107,7 +112,8 @@ public class DiscoveryKeyUtils { Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod sortingMethod) { this(userName, filters, groupAttributeType, groupSortingType, - sortingMethod, null, null); + sortingMethod, null, null, null); + //this constructor should only be used putting things directly into a map or getting if present since casedb, cr, and search context will be null } @Override @@ -141,6 +147,23 @@ public class DiscoveryKeyUtils { return hash; } + /** + * Get the SearchContext for the search this key is being used in. + * + * @return The SearchContext the search key is being used in.
+ * + * @throws DiscoveryException Thrown when the key being used has a null + * context indicating it was not created with + * knowledge of the case or central + * repository databases. + */ + SearchContext getContext() throws DiscoveryException { + if (context == null) { + throw new DiscoveryException("The key in use was created without a context and does not support retrieving information from the databases."); + } + return context; + } + /** * Get the String representation of this key. * diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java index eb52327d45..0557808758 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java @@ -78,24 +78,31 @@ public class DomainSearch { * @param caseDb The case database. * @param centralRepoDb The central repository database. Can be null * if not needed. + * @param context The SearchContext the search is being performed from. * * @return A LinkedHashMap grouped and sorted according to the parameters. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ public Map getGroupSizes(String userName, List filters, DiscoveryAttributes.AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod domainSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { final Map> searchResults = searchCache.get( userName, filters, groupAttributeType, groupSortingType, - domainSortingMethod, caseDb, centralRepoDb); + domainSortingMethod, caseDb, centralRepoDb, context); + // Transform the cached results into a map of group key to group size. 
final LinkedHashMap groupSizes = new LinkedHashMap<>(); for (GroupKey groupKey : searchResults.keySet()) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated"); + } groupSizes.put(groupKey, searchResults.get(groupKey).size()); } @@ -130,11 +137,11 @@ public class DomainSearch { Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod domainSortingMethod, GroupKey groupKey, int startingEntry, int numberOfEntries, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { final Map> searchResults = searchCache.get( userName, filters, groupAttributeType, groupSortingType, - domainSortingMethod, caseDb, centralRepoDb); + domainSortingMethod, caseDb, centralRepoDb, context); final List domainsInGroup = searchResults.get(groupKey); final List page = new ArrayList<>(); for (int i = startingEntry; (i < startingEntry + numberOfEntries) diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java index 306a66b287..ee677a6972 100755 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -55,20 +55,24 @@ class DomainSearchCache { * @param caseDb The case database. * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. 
* * @return Domain search results matching the given parameters. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ Map> get(String userName, List filters, DiscoveryAttributes.AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod domainSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { try { final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, - groupSortingType, domainSortingMethod, caseDb, centralRepoDb); + groupSortingType, domainSortingMethod, caseDb, centralRepoDb, context); return cache.get(searchKey); } catch (ExecutionException ex) { throw new DiscoveryException("Error fetching results from cache", ex.getCause()); diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java index db2ada61de..6a302fdab1 100755 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java @@ -73,7 +73,7 @@ class DomainSearchCacheLoader extends CacheLoader sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -62,17 +62,21 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return The raw search results * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ static SearchResults runFileSearchDebug(String userName, List filters, AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod fileSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Make a list of attributes that we want to add values for. This ensures the // ResultFile objects will have all needed fields set when it's time to group // and sort them. For example, if we're grouping by central repo frequency, we need @@ -82,10 +86,10 @@ public class FileSearch { attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); // Run the queries for each filter - List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); + List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context); // Add the data to resultFiles for any attributes needed for sorting and grouping - addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); + addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context); // Collect everything in the search results SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); @@ -114,21 +118,28 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return A LinkedHashMap grouped and sorted according to the parameters * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ public static Map getGroupSizes(String userName, List filters, AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod fileSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { Map> searchResults = runFileSearch(userName, filters, - groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); + groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context); LinkedHashMap groupSizes = new LinkedHashMap<>(); for (GroupKey groupKey : searchResults.keySet()) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated"); + } groupSizes.put(groupKey, searchResults.get(groupKey).size()); } return groupSizes; @@ -151,10 +162,14 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return A LinkedHashMap grouped and sorted according to the parameters * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ public static List getFilesInGroup(String userName, List filters, @@ -164,7 +179,7 @@ public class FileSearch { GroupKey groupKey, int startingEntry, int numberOfEntries, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { //the group should be in the cache at this point List filesInGroup = null; SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod); @@ -178,7 +193,7 @@ public class FileSearch { List page = new ArrayList<>(); if (filesInGroup == null) { logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey); - runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); + runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context); synchronized (searchCache) { resultsMap = searchCache.getIfPresent(searchKey.getKeyString()); } @@ -218,7 +233,6 @@ public class FileSearch { TextSummarizer localSummarizer; synchronized (searchCache) { localSummarizer = SummaryHelpers.getLocalSummarizer(); - } if (localSummarizer != null) { try { @@ -247,17 +261,21 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return A LinkedHashMap grouped and sorted according to the parameters * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ - private static Map> runFileSearch(String userName, + public static Map> runFileSearch(String userName, List filters, AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod fileSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Make a list of attributes that we want to add values for. This ensures the // ResultFile objects will have all needed fields set when it's time to group @@ -268,10 +286,10 @@ public class FileSearch { attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); // Run the queries for each filter - List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); + List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context); // Add the data to resultFiles for any attributes needed for sorting and grouping - addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); + addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context); // Collect everything in the search results SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); @@ -295,13 +313,17 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if not * needed. + * @param context The SearchContext the search is being performed + * from. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ - private static void addAttributes(List attrs, List results, SleuthkitCase caseDb, CentralRepository centralRepoDb) - throws DiscoveryException { + private static void addAttributes(List attrs, List results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) + throws DiscoveryException, SearchCancellationException { for (AttributeType attr : attrs) { - attr.addAttributeToResults(results, caseDb, centralRepoDb); + attr.addAttributeToResults(results, caseDb, centralRepoDb, context); } } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/SearchCancellationException.java b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchCancellationException.java new file mode 100644 index 0000000000..2587777382 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchCancellationException.java @@ -0,0 +1,40 @@ +/* + * Autopsy + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.discovery.search; + +import java.util.concurrent.CancellationException; + +/** + * Exception to be thrown when the search has been intentionally cancelled to + * provide information on where the code was when the cancellation took place. 
+ */ +public class SearchCancellationException extends CancellationException { + + private static final long serialVersionUID = 1L; + + /** + * Construct a new SearchCancellationException with the specified message. + * + * @param message The text to use as the message for the exception. + */ + SearchCancellationException(String message) { + super(message); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelCellModel.java b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchContext.java similarity index 66% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelCellModel.java rename to Core/src/org/sleuthkit/autopsy/discovery/search/SearchContext.java index 0ca52f12e4..e0ce318b58 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelCellModel.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchContext.java @@ -1,5 +1,5 @@ /* - * Autopsy Forensic Browser + * Autopsy * * Copyright 2021 Basis Technology Corp. * Contact: carrier sleuthkit org @@ -16,17 +16,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.datasourcesummary.uiutils; +package org.sleuthkit.autopsy.discovery.search; /** - * Basic interface for a cell model. + * Interface for providing feedback on if a search has been cancelled. + * */ -public interface ExcelCellModel extends CellModel { +public interface SearchContext { /** - * @return The format string to be used with Apache POI during excel - * export or null if none necessary. + * Returns true if the search has been cancelled, false otherwise. + * + * @return True if the search has been cancelled, false otherwise. 
*/ - String getExcelFormatString(); - + boolean searchIsCancelled(); } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java index 29a4dd698c..eba1402ccf 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019-2020 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -60,10 +60,16 @@ public class SearchFiltering { * @param caseDb The case database. * @param centralRepoDb The central repo. Can be null as long as no filters * need it. + * @param context The SearchContext the search is being performed + * from. * * @return List of Results from the search performed. + * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - static List runQueries(List filters, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + static List runQueries(List filters, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (caseDb == null) { throw new DiscoveryException("Case DB parameter is null"); // NON-NLS } @@ -82,8 +88,11 @@ public class SearchFiltering { // The file search filter is required, so this should never be empty. 
throw new DiscoveryException("Selected filters do not include a case database query"); } + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled before result list could be retrieved."); + } try { - return getResultList(filters, combinedQuery, caseDb, centralRepoDb); + return getResultList(filters, combinedQuery, caseDb, centralRepoDb, context); } catch (TskCoreException ex) { throw new DiscoveryException("Error querying case database", ex); // NON-NLS } @@ -97,17 +106,23 @@ public class SearchFiltering { * @param caseDb The case database. * @param centralRepoDb The central repo. Can be null as long as no filters * need it. + * @param context The SearchContext the search is being performed + * from. * * @return An ArrayList of Results returned by the query. * * @throws TskCoreException * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - private static List getResultList(List filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws TskCoreException, DiscoveryException { + private static List getResultList(List filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws TskCoreException, DiscoveryException, SearchCancellationException { // Get all matching abstract files List resultList = new ArrayList<>(); List sqlResults = caseDb.findAllFilesWhere(combinedQuery); - + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while the case database query was being performed."); + } // If there are no results, return now if (sqlResults.isEmpty()) { return resultList; @@ -120,8 +135,11 @@ public class SearchFiltering { // Now run any non-SQL filters. 
for (AbstractFilter filter : filters) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while alternate filters were being applied."); + } if (filter.useAlternateFilter()) { - resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb); + resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb, context); } // There are no matches for the filters run so far, so return if (resultList.isEmpty()) { @@ -227,7 +245,7 @@ public class SearchFiltering { public Collection getTypes() { return Collections.unmodifiableCollection(types); } - + private StringJoiner joinStandardArtifactTypes() { StringJoiner joiner = new StringJoiner(","); for (ARTIFACT_TYPE type : types) { @@ -241,9 +259,10 @@ public class SearchFiltering { StringJoiner joiner = joinStandardArtifactTypes(); return "artifact_type_id IN (" + joiner + ")"; } - + /** - * Used by backend domain search code to query for additional artifact types. + * Used by backend domain search code to query for additional artifact + * types. 
*/ String getWhereClause(List nonVisibleArtifactTypesToInclude) { StringJoiner joiner = joinStandardArtifactTypes(); @@ -674,14 +693,17 @@ public class SearchFiltering { @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Set the frequency for each file DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute(); - freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); + freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context); // If the frequency matches the filter, add the file to the results List frequencyResults = new ArrayList<>(); for (Result file : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Frequency alternate filter was being applied."); + } if (frequencies.contains(file.getFrequency())) { frequencyResults.add(file); } @@ -705,7 +727,7 @@ public class SearchFiltering { return Bundle.SearchFiltering_FrequencyFilter_desc(desc); } } - + /** * A filter for domains with known account types. 
*/ @@ -715,17 +737,20 @@ public class SearchFiltering { public String getWhereClause() { throw new UnsupportedOperationException("Not supported, this is an alternative filter."); } - + @Override public boolean useAlternateFilter() { return true; } - + @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { List filteredResults = new ArrayList<>(); for (Result result : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Known Account Type alternate filter was being applied."); + } if (result instanceof ResultDomain) { ResultDomain domain = (ResultDomain) result; if (domain.hasKnownAccountType()) { @@ -745,9 +770,9 @@ public class SearchFiltering { public String getDesc() { return Bundle.SearchFiltering_KnownAccountTypeFilter_desc(); } - + } - + /** * A filter for previously notable content in the central repository. 
*/ @@ -757,19 +782,22 @@ public class SearchFiltering { public String getWhereClause() { throw new UnsupportedOperationException("Not supported, this is an alternative filter."); } - + @Override public boolean useAlternateFilter() { return true; } - + @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute(); - previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); + previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context); List filteredResults = new ArrayList<>(); for (Result file : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Previously Notable alternate filter was being applied."); + } if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) { filteredResults.add(file); } @@ -784,7 +812,7 @@ public class SearchFiltering { public String getDesc() { return Bundle.SearchFiltering_PreviouslyNotableFilter_desc(); } - + } /** @@ -1068,7 +1096,7 @@ public class SearchFiltering { @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (centralRepoDb == null) { throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS @@ -1087,6 +1115,9 @@ public class SearchFiltering { CorrelationAttributeInstance.Type type = 
CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID); for (Result result : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Notable alternate filter was being applied."); + } ResultFile file = (ResultFile) result; if (result.getType() == SearchData.Type.DOMAIN) { break; diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java index 450ad4d381..63bbf673a8 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,7 +31,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; -import javax.swing.SwingUtilities; import org.apache.commons.lang.StringUtils; import org.openide.util.NbBundle.Messages; import org.openide.windows.WindowManager; @@ -574,7 +573,7 @@ final class DiscoveryDialog extends javax.swing.JDialog { } private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed - // Get the selected filters + setVisible(false); //set visible used here instead of dispose in case dispose code changes final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent(); if (tc == null) { setValid("No Top Component Found"); @@ -584,6 +583,7 @@ final class DiscoveryDialog extends javax.swing.JDialog { tc.open(); } tc.resetTopComponent(); + // Get the selected filters List filters; if (videosButton.isSelected()) { filters = videoFilterPanel.getFilters(); @@ -617,7 +617,6 @@ final class DiscoveryDialog extends javax.swing.JDialog { }
searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort); searchWorker.execute(); - dispose(); tc.toFront(); tc.requestActive(); }//GEN-LAST:event_searchButtonActionPerformed @@ -651,6 +650,7 @@ final class DiscoveryDialog extends javax.swing.JDialog { void cancelSearch() { if (searchWorker != null) { searchWorker.cancel(true); + searchWorker = null; } } @@ -750,7 +750,6 @@ final class DiscoveryDialog extends javax.swing.JDialog { || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) { shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems); } - } } catch (NoCurrentCaseException notUsed) { // Case is closed, do nothing. diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java index a74b32ca26..c5dba98337 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2019-2020 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -260,7 +260,6 @@ public final class DiscoveryTopComponent extends TopComponent { private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed close(); final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance(); - discDialog.cancelSearch(); discDialog.setVisible(true); discDialog.validateDialog(); }//GEN-LAST:event_newSearchButtonActionPerformed diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java index 491e618683..8718f60a74 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -36,6 +36,8 @@ import org.sleuthkit.autopsy.discovery.search.DiscoveryException; import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.ResultsSorter; import org.sleuthkit.autopsy.discovery.search.Result; +import org.sleuthkit.autopsy.discovery.search.SearchCancellationException; +import org.sleuthkit.autopsy.discovery.search.SearchContext; /** * SwingWorker to retrieve the contents of a page. 
@@ -87,7 +89,7 @@ final class PageWorker extends SwingWorker { @Override protected Void doInBackground() throws Exception { - + SearchContext context = new SwingWorkerSearchContext(this); try { // Run the search if (resultType == SearchData.Type.DOMAIN) { @@ -96,17 +98,22 @@ final class PageWorker extends SwingWorker { groupingAttribute, groupSort, fileSortMethod, groupKey, startingEntry, pageSize, - Case.getCurrentCase().getSleuthkitCase(), centralRepo)); + Case.getCurrentCase().getSleuthkitCase(), centralRepo, context)); } else { results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters, groupingAttribute, groupSort, fileSortMethod, groupKey, startingEntry, pageSize, - Case.getCurrentCase().getSleuthkitCase(), centralRepo)); + Case.getCurrentCase().getSleuthkitCase(), centralRepo, context)); } } catch (DiscoveryException ex) { logger.log(Level.SEVERE, "Error running file search test", ex); cancel(true); + } catch (SearchCancellationException ex) { + //The user does not explicitly have a way to cancel the loading of a page + //but they could have cancelled the search during the loading of the first page + //So this may or may not be an issue depending on when this occurred. + logger.log(Level.WARNING, "Search was cancelled while retrieving data for results page with starting entry: " + startingEntry, ex); } return null; } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java index 7c6863ce62..6ba7a75fd2 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,6 +35,8 @@ import org.sleuthkit.autopsy.discovery.search.FileSearch; import org.sleuthkit.autopsy.discovery.search.DiscoveryException; import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.ResultsSorter; +import org.sleuthkit.autopsy.discovery.search.SearchCancellationException; +import org.sleuthkit.autopsy.discovery.search.SearchContext; import org.sleuthkit.autopsy.discovery.search.SearchData; /** @@ -75,23 +77,28 @@ final class SearchWorker extends SwingWorker { protected Void doInBackground() throws Exception { try { // Run the search + SearchContext context = new SwingWorkerSearchContext(this); if (searchType == SearchData.Type.DOMAIN) { DomainSearch domainSearch = new DomainSearch(); results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, groupingAttr, groupSortAlgorithm, fileSort, - Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); + Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context)); } else { results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, groupingAttr, groupSortAlgorithm, fileSort, - Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); + Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context)); } } catch (DiscoveryException ex) { - logger.log(Level.SEVERE, "Error running file search test", ex); + logger.log(Level.SEVERE, "Error running file search test.", ex); cancel(true); + } catch (SearchCancellationException ex) { + //search cancellation exceptions should indicate that the user chose to cancel this search + //so would not be a problem but we might be curious what was being done when it was cancelled + logger.log(Level.INFO, "Discovery search was cancelled.", ex); } return null; } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java
b/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java new file mode 100644 index 0000000000..2d51d755f9 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java @@ -0,0 +1,45 @@ +/* + * Autopsy + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.discovery.ui; + +import javax.swing.SwingWorker; +import org.sleuthkit.autopsy.discovery.search.SearchContext; + +/** + * Implementation of SearchContext for searches being performed in the + * background thread of a SwingWorker. + */ +class SwingWorkerSearchContext implements SearchContext { + + private final SwingWorker searchWorker; + + /** + * Construct a new SwingWorkerSearchContext. + * + * @param worker The SwingWorker the search is being performed in. 
+ */ + SwingWorkerSearchContext(SwingWorker worker) { + searchWorker = worker; + } + + @Override + public boolean searchIsCancelled() { + return searchWorker.isCancelled(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/DataSourceFilter.java b/Core/src/org/sleuthkit/autopsy/filesearch/DataSourceFilter.java index fd0a232dcf..1d0063a8ff 100755 --- a/Core/src/org/sleuthkit/autopsy/filesearch/DataSourceFilter.java +++ b/Core/src/org/sleuthkit/autopsy/filesearch/DataSourceFilter.java @@ -48,6 +48,15 @@ class DataSourceFilter extends AbstractFileSearchFilter { return this.getComponent().isSelected(); } + /** + * Set the data source filter to select the specified data source initially. + * + * @param dataSourceId - The data source to select. + */ + void setSelectedDataSource(long dataSourceId) { + this.getComponent().setDataSourceSelected(dataSourceId); + } + /** * Reset the data source filter to be up to date with the current case. */ diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/DataSourcePanel.java b/Core/src/org/sleuthkit/autopsy/filesearch/DataSourcePanel.java index 7b19d4d5c8..6fc610b224 100755 --- a/Core/src/org/sleuthkit/autopsy/filesearch/DataSourcePanel.java +++ b/Core/src/org/sleuthkit/autopsy/filesearch/DataSourcePanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -152,6 +152,19 @@ public class DataSourcePanel extends javax.swing.JPanel { this.dataSourceNoteLabel.setEnabled(enabled); } + /** + * Set the data source initially selected in this filter. + * + * @param dataSourceId - The object ID of the data source which will be + * selected. 
+ */ + void setDataSourceSelected(long dataSourceId) { + this.dataSourceCheckBox.setSelected(true); + setComponentsEnabled(); + String dataSourceName = dataSourceMap.get(dataSourceId); + dataSourceList.setSelectedValue(dataSourceName, true); + } + /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java b/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java index fb78ee8e74..11d8f94561 100644 --- a/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java +++ b/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchAction.java @@ -32,6 +32,7 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP private static final long serialVersionUID = 1L; private static FileSearchAction instance = null; private static FileSearchDialog searchDialog; + private static Long selectedDataSourceId; FileSearchAction() { super(); @@ -39,7 +40,7 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), (PropertyChangeEvent evt) -> { if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { setEnabled(evt.getNewValue() != null); - if (searchDialog != null && evt.getNewValue() != null){ + if (searchDialog != null && evt.getNewValue() != null) { searchDialog.resetCaseDependentFilters(); } } @@ -57,7 +58,9 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP public void actionPerformed(ActionEvent e) { if (searchDialog == null) { searchDialog = new FileSearchDialog(); - } + } + //Clear the previously selected data source so it is not reused for this search + selectedDataSourceId = null; searchDialog.setVisible(true); } @@ -66,6 +69,8 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP if (searchDialog == null) { searchDialog = new
FileSearchDialog(); } + // + searchDialog.setSelectedDataSourceFilter(selectedDataSourceId); searchDialog.setVisible(true); } @@ -85,7 +90,15 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP } @Override - public void showDialog() { + public void showDialog(Long dataSourceId) { + selectedDataSourceId = dataSourceId; performAction(); + + } + + @Override + @Deprecated + public void showDialog() { + showDialog(null); } } diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchDialog.java b/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchDialog.java index 01217f7008..a98e531700 100644 --- a/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchDialog.java +++ b/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchDialog.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -29,6 +29,8 @@ import org.openide.windows.WindowManager; @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives final class FileSearchDialog extends javax.swing.JDialog { + private static final long serialVersionUID = 1L; + /** * Creates new form FileSearchDialog */ @@ -48,6 +50,15 @@ final class FileSearchDialog extends javax.swing.JDialog { }); } + /** + * Set the data source filter to select the specified data source initially. + * + * @param dataSourceId - The data source to select. + */ + void setSelectedDataSourceFilter(long dataSourceId) { + fileSearchPanel1.setDataSourceFilter(dataSourceId); + } + /** * Reset the filters which are populated with options based on the contents * of the current case. 
diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchPanel.java b/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchPanel.java index 4775041b1f..14b1578076 100644 --- a/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchPanel.java +++ b/Core/src/org/sleuthkit/autopsy/filesearch/FileSearchPanel.java @@ -57,13 +57,13 @@ import org.sleuthkit.datamodel.TskCoreException; */ @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives class FileSearchPanel extends javax.swing.JPanel { - + private static final Logger logger = Logger.getLogger(FileSearchPanel.class.getName()); private static final long serialVersionUID = 1L; private final List filters = new ArrayList<>(); private static int resultWindowCount = 0; //keep track of result windows so they get unique names - private static MimeTypeFilter mimeTypeFilter = new MimeTypeFilter(); - private static DataSourceFilter dataSourceFilter = new DataSourceFilter(); + private static final MimeTypeFilter mimeTypeFilter = new MimeTypeFilter(); + private static final DataSourceFilter dataSourceFilter = new DataSourceFilter(); private static final String EMPTY_WHERE_CLAUSE = NbBundle.getMessage(DateSearchFilter.class, "FileSearchPanel.emptyWhereClause.text"); private static SwingWorker searchWorker = null; @@ -106,7 +106,6 @@ class FileSearchPanel extends javax.swing.JPanel { DateSearchFilter dateFilter = new DateSearchFilter(); KnownStatusSearchFilter knowStatusFilter = new KnownStatusSearchFilter(); HashSearchFilter hashFilter = new HashSearchFilter(); - panel2.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.name"), nameFilter)); panel3.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.metadata"), sizeFilter)); @@ -149,6 +148,15 @@ class FileSearchPanel extends javax.swing.JPanel { searchButton.setEnabled(isValidSearch()); } + /** + * Set the data source filter to select the specified data source initially. 
+ * + * @param dataSourceId - The data source to select. + */ + void setDataSourceFilter(long dataSourceId) { + dataSourceFilter.setSelectedDataSource(dataSourceId); + } + /** * @return true if any of the filters in the panel are enabled (checked) */ @@ -334,7 +342,7 @@ class FileSearchPanel extends javax.swing.JPanel { return enabledFilters; } - + /** * Reset the filters which are populated with options based on the contents * of the current case. diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/MimeTypePanel.java b/Core/src/org/sleuthkit/autopsy/filesearch/MimeTypePanel.java index 676ef4188d..3128f7982f 100644 --- a/Core/src/org/sleuthkit/autopsy/filesearch/MimeTypePanel.java +++ b/Core/src/org/sleuthkit/autopsy/filesearch/MimeTypePanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED index 9c1a1bfa5c..11fbd0a9d8 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED @@ -115,7 +115,7 @@ IngestJobSettingsPanel.jButtonSelectAll.text=Select All IngestJobSettingsPanel.jButtonDeselectAll.text=Deselect All IngestManager.cancellingIngest.msgDlg.text=Cancelling all currently running ingest jobs IngestManager.serviceIsDown.msgDlg.text={0} is down -ProfilePanel.messages.profileNameContainsIllegalCharacter=Profile name contains an illegal character +ProfilePanel.messages.profileNameContainsIllegalCharacter=Profile name contains an illegal character. Only \nletters, digits, and underscore characters are allowed. ProfilePanel.messages.profilesMustBeNamed=Ingest profile must be named. 
ProfilePanel.newProfileText=NewEmptyProfile ProfilePanel.profileDescLabel.text=Description: diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/ingest/Bundle_ja.properties index 1456e6922a..2b0efb2f0b 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle_ja.properties +++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle_ja.properties @@ -135,7 +135,6 @@ ModuleTableModel.colName.module=\u30e2\u30b8\u30e5\u30fc\u30eb OpenIDE-Module-Name=\u30a4\u30f3\u30b8\u30a7\u30b9\u30c8 OptionsCategory_Keywords_IngestOptions=\u30a4\u30f3\u30b8\u30a7\u30b9\u30c8 OptionsCategory_Name_IngestOptions=\u30a4\u30f3\u30b8\u30a7\u30b9\u30c8 -ProfilePanel.messages.profileNameContainsIllegalCharacter=\u30d7\u30ed\u30d5\u30a1\u30a4\u30eb\u540d\u306b\u4e0d\u6b63\u306a\u6587\u5b57\u304c\u542b\u307e\u308c\u3066\u3044\u307e\u3059 ProfilePanel.messages.profilesMustBeNamed=\u30a4\u30f3\u30b8\u30a7\u30b9\u30c8\u30d7\u30ed\u30d5\u30a1\u30a4\u30eb\u306b\u540d\u524d\u3092\u4ed8\u3051\u308b\u5fc5\u8981\u304c\u3042\u308a\u307e\u3059\u3002 ProfilePanel.newProfileText=NewEmptyProfile ProfilePanel.profileDescLabel.text=\u8aac\u660e\: diff --git a/Core/src/org/sleuthkit/autopsy/ingest/ProfilePanel.java b/Core/src/org/sleuthkit/autopsy/ingest/ProfilePanel.java index dc145a6824..5efbb1bd4e 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/ProfilePanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/ProfilePanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2011-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,10 +19,6 @@ package org.sleuthkit.autopsy.ingest; import java.beans.PropertyChangeListener; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; import org.openide.DialogDisplayer; import org.openide.NotifyDescriptor; import org.openide.util.NbBundle; @@ -39,13 +35,12 @@ class ProfilePanel extends IngestModuleGlobalSettingsPanel { "ProfilePanel.profileNameLabel.text=Profile Name:", "ProfilePanel.newProfileText=NewEmptyProfile", "ProfilePanel.messages.profilesMustBeNamed=Ingest profile must be named.", - "ProfilePanel.messages.profileNameContainsIllegalCharacter=Profile name contains an illegal character"}) + "ProfilePanel.messages.profileNameContainsIllegalCharacter=Profile name contains an illegal character. Only \nletters, digits, and underscore characters are allowed."}) private final IngestJobSettingsPanel ingestSettingsPanel; private final IngestJobSettings settings; private IngestProfile profile; private final static String NEW_PROFILE_NAME = NbBundle.getMessage(ProfilePanel.class, "ProfilePanel.newProfileText"); - private static final List ILLEGAL_NAME_CHARS = Collections.unmodifiableList(new ArrayList<>(Arrays.asList("\\", "/", ":", "*", "?", "\"", "<", ">"))); /** * Creates new form ProfilePanel @@ -231,8 +226,12 @@ class ProfilePanel extends IngestModuleGlobalSettingsPanel { /** * Save a new or edited profile. */ - void store() { + boolean store() { + if (!isValidDefinition(false)) { + return false; + } saveSettings(); + return true; } void load() { @@ -240,41 +239,33 @@ class ProfilePanel extends IngestModuleGlobalSettingsPanel { /** * Checks that information entered constitutes a valid ingest profile. + * + * @param dispayWarnings boolean flag whether to display warnings if an error occurred. * * @return true for valid, false for invalid. 
*/ - boolean isValidDefinition() { - if (getProfileName().isEmpty()) { - NotifyDescriptor notifyDesc = new NotifyDescriptor.Message( - NbBundle.getMessage(ProfilePanel.class, "ProfilePanel.messages.profilesMustBeNamed"), - NotifyDescriptor.WARNING_MESSAGE); - DialogDisplayer.getDefault().notify(notifyDesc); - return false; - } - if (!containsOnlyLegalChars(getProfileName(), ILLEGAL_NAME_CHARS)) { - NotifyDescriptor notifyDesc = new NotifyDescriptor.Message( - NbBundle.getMessage(ProfilePanel.class, "ProfilePanel.messages.profileNameContainsIllegalCharacter"), - NotifyDescriptor.WARNING_MESSAGE); - DialogDisplayer.getDefault().notify(notifyDesc); - return false; - } - return true; - } - - /** - * Checks an input string for the use of illegal characters. - * - * @param toBeChecked The input string. - * @param illegalChars The characters deemed to be illegal. - * - * @return True if the string does not contain illegal characters, false - * otherwise. - */ - private static boolean containsOnlyLegalChars(String toBeChecked, List illegalChars) { - for (String illegalChar : illegalChars) { - if (toBeChecked.contains(illegalChar)) { - return false; + boolean isValidDefinition(boolean dispayWarnings) { + String profileName = getProfileName(); + if (profileName.isEmpty()) { + if (dispayWarnings) { + NotifyDescriptor notifyDesc = new NotifyDescriptor.Message( + NbBundle.getMessage(ProfilePanel.class, "ProfilePanel.messages.profilesMustBeNamed"), + NotifyDescriptor.WARNING_MESSAGE); + DialogDisplayer.getDefault().notify(notifyDesc); } + return false; + } + + // check if the name contains illegal characters + String sanitizedName = profileName.replaceAll("[^A-Za-z0-9_]", ""); + if (!(profileName.equals(sanitizedName))) { + if (dispayWarnings) { + NotifyDescriptor notifyDesc = new NotifyDescriptor.Message( + NbBundle.getMessage(ProfilePanel.class, "ProfilePanel.messages.profileNameContainsIllegalCharacter"), + NotifyDescriptor.WARNING_MESSAGE); + 
DialogDisplayer.getDefault().notify(notifyDesc); + } + return false; } return true; } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/ProfileSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/ProfileSettingsPanel.java index c841431171..300cc613d8 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/ProfileSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/ProfileSettingsPanel.java @@ -416,7 +416,7 @@ class ProfileSettingsPanel extends IngestModuleGlobalSettingsPanel implements Op do { option = JOptionPane.CANCEL_OPTION; dialog.display(panel); - } while (option == JOptionPane.OK_OPTION && !panel.isValidDefinition()); + } while (option == JOptionPane.OK_OPTION && !panel.isValidDefinition(true)); if (option == JOptionPane.OK_OPTION) { diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties index 2f8baa63cc..5e655eb928 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties @@ -10,7 +10,6 @@ OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0} EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream. 
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File) -EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull=Full Encryption (Archive File) EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}. diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED index efee783e8f..6f7251676d 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED @@ -23,7 +23,6 @@ OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0} EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream. EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File) -EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull=Full Encryption (Archive File) EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}. 
diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java index 05b3fdfeb0..a96de76442 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java @@ -58,6 +58,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +import org.sleuthkit.autopsy.modules.encryptiondetection.EncryptionDetectionModuleFactory; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMonitor; @@ -95,8 +96,7 @@ class SevenZipExtractor { //encryption type strings private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel"); - private static final String ENCRYPTION_FULL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, - "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull"); + private static final String ENCRYPTION_FULL = EncryptionDetectionModuleFactory.PASSWORD_PROTECT_MESSAGE; //zip bomb detection private static final int MAX_DEPTH = 4; diff --git a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java index ff773cf8ef..81e7c1877e 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java @@ -83,7 +83,7 @@ final class 
EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter private Blackboard blackboard; private IngestJobContext context; private double calculatedEntropy; - + private final double minimumEntropy; private final int minimumFileSize; private final boolean fileSizeMultipleEnforced; @@ -119,7 +119,6 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter } @Messages({ - "EncryptionDetectionFileIngestModule.artifactComment.password=Password protection detected.", "EncryptionDetectionFileIngestModule.artifactComment.suspected=Suspected encryption due to high entropy (%f)." }) @Override @@ -160,7 +159,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter String.format(Bundle.EncryptionDetectionFileIngestModule_artifactComment_suspected(), calculatedEntropy)); } else if (isFilePasswordProtected(file)) { return flagFile(file, BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_NOTABLE, - Bundle.EncryptionDetectionFileIngestModule_artifactComment_password()); + EncryptionDetectionModuleFactory.PASSWORD_PROTECT_MESSAGE); } } } catch (ReadContentInputStreamException | SAXException | TikaException | UnsupportedCodecException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionModuleFactory.java b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionModuleFactory.java index 7a2d486841..7cfff12b07 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionModuleFactory.java +++ b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionModuleFactory.java @@ -36,15 +36,19 @@ import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel; @ServiceProvider(service = IngestModuleFactory.class) @Messages({ "EncryptionDetectionFileIngestModule.moduleName.text=Encryption Detection", - "EncryptionDetectionFileIngestModule.getDesc.text=Looks for files with the specified minimum 
entropy." + "EncryptionDetectionFileIngestModule.getDesc.text=Looks for files with the specified minimum entropy.", + "EncryptionDetectionFileIngestModule.artifactComment.password=Password protection detected.", }) + public class EncryptionDetectionModuleFactory implements IngestModuleFactory { + public static final String PASSWORD_PROTECT_MESSAGE = Bundle.EncryptionDetectionFileIngestModule_artifactComment_password(); + @Override public String getModuleDisplayName() { return getModuleName(); } - + /** * Get the name of the module. * diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartExport.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/BarChartExport.java similarity index 93% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartExport.java rename to Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/BarChartExport.java index 15a93092cc..330aac4cff 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/BarChartExport.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/BarChartExport.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.datasourcesummary.uiutils; +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; import java.awt.Color; import java.nio.ByteBuffer; @@ -49,15 +49,16 @@ import org.apache.poi.xssf.usermodel.XSSFChart; import org.apache.poi.xssf.usermodel.XSSFClientAnchor; import org.apache.poi.xssf.usermodel.XSSFDrawing; import org.apache.poi.xssf.usermodel.XSSFSheet; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelExportException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.ExcelItemExportable; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.ItemDimensions; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelExportException; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelSheetExport; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.ExcelItemExportable; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.ItemDimensions; /** * Class that creates an excel stacked bar chart along with data table. */ -public class BarChartExport implements ExcelItemExportable, ExcelSheetExport { +class BarChartExport implements ExcelItemExportable, ExcelSheetExport { /** * Creates an excel table model to be written to an excel sheet and used as @@ -70,7 +71,7 @@ public class BarChartExport implements ExcelItemExportable, ExcelSheetExport { * @return An excel table export to be used as the data source for the chart * in the excel document. */ - private static ExcelTableExport>, ? extends ExcelCellModel> getTableModel( + private static ExcelTableExport>, ? 
extends CellModel> getTableModel( List categories, String keyColumnHeader, String chartTitle) { // get the row keys by finding the series with the largest set of bar items @@ -134,7 +135,7 @@ public class BarChartExport implements ExcelItemExportable, ExcelSheetExport { private static final int DEFAULT_ROW_PADDING = 1; private static final int DEFAULT_COL_OFFSET = 1; - private final ExcelTableExport>, ? extends ExcelCellModel> tableExport; + private final ExcelTableExport>, ? extends CellModel> tableExport; private final int colOffset; private final int rowPadding; private final int colSize; @@ -154,7 +155,7 @@ public class BarChartExport implements ExcelItemExportable, ExcelSheetExport { * @param chartTitle The title for the chart. * @param categories The categories along with data. */ - public BarChartExport(String keyColumnHeader, + BarChartExport(String keyColumnHeader, String valueFormatString, String chartTitle, List categories) { @@ -177,7 +178,7 @@ public class BarChartExport implements ExcelItemExportable, ExcelSheetExport { * @param colSize The column size of the chart. * @param rowSize The row size of the chart. */ - public BarChartExport(String keyColumnHeader, String valueFormatString, + BarChartExport(String keyColumnHeader, String valueFormatString, String chartTitle, String sheetName, List categories, int colOffset, int rowPadding, int colSize, int rowSize) { diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/Bundle.properties b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/Bundle.properties new file mode 100755 index 0000000000..6c86626cdb --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/Bundle.properties @@ -0,0 +1,3 @@ +DataSourceSummaryReport.getName.text=Data Source Summary Report +DataSourceSummaryReport.getDesc.text=Data source summary report in Excel (XLS) format. 
+DataSourceSummaryReport.endReport.srcModuleName.text=Excel Report \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/Bundle.properties-MERGED new file mode 100755 index 0000000000..1767afc028 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/Bundle.properties-MERGED @@ -0,0 +1,123 @@ +DataSourceSummaryReport.error.noDataSources=No data sources selected for report. +DataSourceSummaryReport.error.noOpenCase=No currently open case. +DataSourceSummaryReport.excelFileWriteError=Could not write the KML file. +DataSourceSummaryReport.failedToCompleteReport=Failed to complete report. +DataSourceSummaryReport.getName.text=Data Source Summary Report +DataSourceSummaryReport.getDesc.text=Data source summary report in Excel (XLS) format. +DataSourceSummaryReport.endReport.srcModuleName.text=Excel Report +# {0} - sheetNumber +ExcelExport_writeExcel_noSheetName=Sheet {0} +ExcelExportAction_exportToXLSX_beginExport=Beginning Export... +ExcelExportAction_exportToXLSX_gatheringAnalysisData=Fetching Analysis Data +ExcelExportAction_exportToXLSX_gatheringContainerData=Fetching Container & Image Data +ExcelExportAction_exportToXLSX_gatheringFileData=Fetching File and MIME Type Data +ExcelExportAction_exportToXLSX_gatheringGeoData=Fetching Geolocation Data +ExcelExportAction_exportToXLSX_gatheringIngestData=Fetching Ingest History Data +ExcelExportAction_exportToXLSX_gatheringPastData=Fetching Historical Data +ExcelExportAction_exportToXLSX_gatheringRecentActivityData=Fetching Recent Activity Data +ExcelExportAction_exportToXLSX_gatheringTimelineData=Fetching Timeline Data +ExcelExportAction_exportToXLSX_gatheringUserData=Fetching User Activity Data +ExcelExportAction_exportToXLSX_writingToFile=Writing to File... 
+ExcelExportAction_getXLSXPath_directory=DataSourceSummary +ExcelExportAction_moduleName=Data Source Summary +ExportAnalysisResults_countColumn_title=Count +ExportAnalysisResults_hashsetHits_tabName=Hashset Hits +ExportAnalysisResults_interestingItemHits_tabName=Interesting Item Hits +ExportAnalysisResults_keyColumn_title=Name +ExportAnalysisResults_keywordHits_tabName=Keyword Hits +ExportAnalysisResults_keywordSearchModuleName=Keyword Search +ExportContainerInfo_export_acquisitionDetails=Acquisition Details: +ExportContainerInfo_export_deviceId=Device ID: +ExportContainerInfo_export_displayName=Display Name: +ExportContainerInfo_export_filePaths=File Paths: +ExportContainerInfo_export_imageType=Image Type: +ExportContainerInfo_export_md5=MD5: +ExportContainerInfo_export_originalName=Name: +ExportContainerInfo_export_sectorSize=Sector Size: +ExportContainerInfo_export_sha1=SHA1: +ExportContainerInfo_export_sha256=SHA256: +ExportContainerInfo_export_size=Size: +ExportContainerInfo_export_timeZone=Time Zone: +ExportContainerInfo_export_unallocatedSize=Unallocated Space: +ExportContainerInfo_setFieldsForNonImageDataSource_na=N/A +ExportContainerInfo_tabName=Container +ExportGeolocation_cityColumn_title=Closest City +ExportGeolocation_countColumn_title=Count +ExportGeolocation_mostCommon_tabName=Most Common Cities +ExportGeolocation_mostRecent_tabName=Most Recent Cities +ExportGeolocation_unknownRow_title=Unknown +ExportIngestHistory_endTimeColumn=End Time +ExportIngestHistory_ingestStatusTimeColumn=Ingest Status +ExportIngestHistory_moduleNameTimeColumn=Module Name +ExportIngestHistory_sheetName=Ingest History +ExportIngestHistory_startTimeColumn=Start Time +ExportIngestHistory_versionColumn=Module Version +ExportPastCases_caseColumn_title=Case +ExportPastCases_countColumn_title=Count +ExportPastCases_notableFileTable_tabName=Cases with Common Notable +ExportPastCases_sameIdsTable_tabName=Past Cases with the Same Devices 
+ExportRecentFiles_attachmentsTable_tabName=Recent Attachments +ExportRecentFiles_col_head_date=Date +ExportRecentFiles_col_header_domain=Domain +ExportRecentFiles_col_header_path=Path +ExportRecentFiles_col_header_sender=Sender +ExportRecentFiles_docsTable_tabName=Recently Opened Documents +ExportRecentFiles_downloadsTable_tabName=Recently Downloads +ExportTypes_artifactsTypesPieChart_title=Artifact Types +ExportTypes_excelTabName=Types +ExportTypes_fileMimeTypesChart_audio_title=Audio +ExportTypes_fileMimeTypesChart_documents_title=Documents +ExportTypes_fileMimeTypesChart_executables_title=Executables +ExportTypes_fileMimeTypesChart_images_title=Images +ExportTypes_fileMimeTypesChart_notAnalyzed_title=Not Analyzed +ExportTypes_fileMimeTypesChart_other_title=Other +ExportTypes_fileMimeTypesChart_title=File Types +ExportTypes_fileMimeTypesChart_unknown_title=Unknown +ExportTypes_fileMimeTypesChart_valueLabel=Count +ExportTypes_fileMimeTypesChart_videos_title=Videos +ExportTypes_filesByCategoryTable_allocatedRow_title=Allocated Files +ExportTypes_filesByCategoryTable_directoryRow_title=Directories +ExportTypes_filesByCategoryTable_slackRow_title=Slack Files +ExportTypes_filesByCategoryTable_unallocatedRow_title=Unallocated Files +ExportTypes_osLabel_title=OS +ExportTypes_sizeLabel_title=Size +ExportTypes_usageLabel_title=Usage +ExportUserActivity_noDataExists=No communication data exists +ExportUserActivity_tab_title=User Activity +ExportUserActivity_TopAccountTableModel_accountType_header=Account Type +ExportUserActivity_TopAccountTableModel_lastAccess_header=Last Accessed +ExportUserActivity_TopAccountTableModel_tabName=Recent Account Types Used +ExportUserActivity_TopDeviceAttachedTableModel_dateAccessed_header=Last Accessed +ExportUserActivity_TopDeviceAttachedTableModel_deviceId_header=Device Id +ExportUserActivity_TopDeviceAttachedTableModel_makeModel_header=Make and Model +ExportUserActivity_TopDeviceAttachedTableModel_tabName=Recent Devices Attached 
+ExportUserActivity_TopDomainsTableModel_count_header=Visits +ExportUserActivity_TopDomainsTableModel_domain_header=Domain +ExportUserActivity_TopDomainsTableModel_lastAccess_header=Last Accessed +ExportUserActivity_TopDomainsTableModel_tabName=Recent Domains +ExportUserActivity_TopProgramsTableModel_count_header=Run Times +ExportUserActivity_TopProgramsTableModel_folder_header=Folder +ExportUserActivity_TopProgramsTableModel_lastrun_header=Last Run +ExportUserActivity_TopProgramsTableModel_name_header=Program +ExportUserActivity_TopProgramsTableModel_tabName=Recent Programs +ExportUserActivity_TopWebSearchTableModel_dateAccessed_header=Date Accessed +ExportUserActivity_TopWebSearchTableModel_searchString_header=Search String +ExportUserActivity_TopWebSearchTableModel_tabName=Recent Web Searches +ExportUserActivity_TopWebSearchTableModel_translatedResult_header=Translated +SizeRepresentationUtil_units_bytes=bytes +SizeRepresentationUtil_units_gigabytes=GB +SizeRepresentationUtil_units_kilobytes=KB +SizeRepresentationUtil_units_megabytes=MB +SizeRepresentationUtil_units_petabytes=PB +SizeRepresentationUtil_units_terabytes=TB +TimelinePanel_earliestLabel_title=Earliest +TimelinePanel_getExports_activityRange=Activity Range +TimelinePanel_getExports_chartName=Last 30 Days +TimelinePanel_getExports_dateColumnHeader=Date +TimelinePanel_getExports_earliest=Earliest: +TimelinePanel_getExports_latest=Latest: +TimelinePanel_getExports_sheetName=Timeline +TimelinePanel_latestLabel_title=Latest +TimlinePanel_last30DaysChart_artifactEvts_title=Result Events +TimlinePanel_last30DaysChart_fileEvts_title=File Events +TimlinePanel_last30DaysChart_title=Last 30 Days diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/CellModel.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/CellModel.java new file mode 100755 index 0000000000..ce072525ea --- /dev/null +++ 
b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/CellModel.java @@ -0,0 +1,91 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import javax.swing.JLabel; +import org.apache.poi.ss.usermodel.HorizontalAlignment; + +/** + * Basic interface for a cell model. + */ +interface CellModel { + + /** + * Describes the horizontal alignment. + */ + enum HorizontalAlign { + LEFT(JLabel.LEFT, HorizontalAlignment.LEFT), + CENTER(JLabel.CENTER, HorizontalAlignment.CENTER), + RIGHT(JLabel.RIGHT, HorizontalAlignment.RIGHT); + + private final int jlabelAlignment; + private final HorizontalAlignment poiAlignment; + + /** + * Constructor for a HorizontalAlign enum. + * + * @param jlabelAlignment The corresponding JLabel horizontal alignment + * number. + * @param poiAlignment Horizontal alignment for Apache POI. + */ + HorizontalAlign(int jlabelAlignment, HorizontalAlignment poiAlignment) { + this.jlabelAlignment = jlabelAlignment; + this.poiAlignment = poiAlignment; + } + + /** + * @return The corresponding JLabel horizontal alignment (i.e. + * JLabel.LEFT). + */ + int getJLabelAlignment() { + return this.jlabelAlignment; + } + + /** + * @return Horizontal alignment for Apache POI. 
+ */ + HorizontalAlignment getPoiAlignment() { + return poiAlignment; + } + } + + /** + * @return The root data object. + */ + Object getData(); + + /** + * @return The text to be shown in the cell. + */ + default String getText() { + Object data = getData(); + return (data == null) ? null : data.toString(); + } + + /** + * @return The horizontal alignment for the text in the cell. + */ + HorizontalAlign getHorizontalAlignment(); + + /** + * @return The format string to be used with Apache POI during excel + * export or null if none necessary. + */ + String getExcelFormatString(); +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ColumnModel.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ColumnModel.java new file mode 100755 index 0000000000..9c68de3992 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ColumnModel.java @@ -0,0 +1,80 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.util.function.Function; + +/** + * Describes aspects of a column which can be used with getTableModel or + * getJTablePanel. 'T' represents the object that will represent rows in the + * table. 
+ */ +class ColumnModel { + + private final String headerTitle; + private final Function cellRenderer; + private final Integer width; + + /** + * Constructor for a DataResultColumnModel. + * + * @param headerTitle The title for the column. + * @param cellRenderer The method that generates a CellModel for the column + * based on the data. + */ + ColumnModel(String headerTitle, Function cellRenderer) { + this(headerTitle, cellRenderer, null); + } + + /** + * Constructor for a DataResultColumnModel. + * + * @param headerTitle The title for the column. + * @param cellRenderer The method that generates a CellModel for the column + * based on the data. + * @param width The preferred width of the column. + */ + ColumnModel(String headerTitle, Function cellRenderer, Integer width) { + this.headerTitle = headerTitle; + this.cellRenderer = cellRenderer; + this.width = width; + } + + /** + * @return The title for the column. + */ + String getHeaderTitle() { + return headerTitle; + } + + /** + * @return The method that generates a CellModel for the column based on the + * data. + */ + Function getCellRenderer() { + return cellRenderer; + } + + /** + * @return The preferred width of the column (can be null). + */ + Integer getWidth() { + return width; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/DataSourceSummaryReport.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/DataSourceSummaryReport.java new file mode 100755 index 0000000000..a52f897897 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/DataSourceSummaryReport.java @@ -0,0 +1,162 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import java.util.stream.Collectors; +import javax.swing.JPanel; +import org.openide.util.NbBundle; +import org.openide.util.lookup.ServiceProvider; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.report.GeneralReportModule; +import org.sleuthkit.autopsy.report.GeneralReportSettings; +import org.sleuthkit.autopsy.report.ReportProgressPanel; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Instances of this class plug in to the reporting infrastructure to provide a + * convenient way to extract data source summary information into Excel. 
+ */ +@ServiceProvider(service = GeneralReportModule.class) +public class DataSourceSummaryReport implements GeneralReportModule { + + private static final Logger logger = Logger.getLogger(DataSourceSummaryReport.class.getName()); + private static DataSourceSummaryReport instance; + + // Get the default instance of this report + public static synchronized DataSourceSummaryReport getDefault() { + if (instance == null) { + instance = new DataSourceSummaryReport(); + } + return instance; + } + + public DataSourceSummaryReport() { + } + + + @Override + public String getName() { + String name = NbBundle.getMessage(this.getClass(), "DataSourceSummaryReport.getName.text"); + return name; + } + + @Override + public String getRelativeFilePath() { + return ""; + } + + @Override + public String getDescription() { + String desc = NbBundle.getMessage(this.getClass(), "DataSourceSummaryReport.getDesc.text"); + return desc; + } + + @Override + public JPanel getConfigurationPanel() { + return null; + } + + @Override + public boolean supportsDataSourceSelection() { + return true; + } + + @NbBundle.Messages({ + "DataSourceSummaryReport.error.noOpenCase=No currently open case.", + "DataSourceSummaryReport.error.noDataSources=No data sources selected for report.", + "DataSourceSummaryReport.failedToCompleteReport=Failed to complete report.", + "DataSourceSummaryReport.excelFileWriteError=Could not write the Excel file.",}) + @Override + public void generateReport(GeneralReportSettings settings, ReportProgressPanel progressPanel) { + progressPanel.start(); + Case currentCase; + try { + currentCase = Case.getCurrentCaseThrows(); + } catch (NoCurrentCaseException ex) { + progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, Bundle.DataSourceSummaryReport_error_noOpenCase()); + logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS + return; + } + + String errorMessage = ""; + ReportProgressPanel.ReportStatus result = 
ReportProgressPanel.ReportStatus.COMPLETE; + List selectedDataSources = new ArrayList<>(); + if(settings.getSelectedDataSources() == null) { + // Process all data sources if the list is null. + try { + selectedDataSources = currentCase.getDataSources(); + List dsIDs = selectedDataSources + .stream() + .map(Content::getId) + .collect(Collectors.toList()); + settings.setSelectedDataSources(dsIDs); + } catch (TskCoreException ex) { + result = ReportProgressPanel.ReportStatus.ERROR; + errorMessage = Bundle.DataSourceSummaryReport_failedToCompleteReport(); + logger.log(Level.SEVERE, "Could not get the datasources from the case", ex); + progressPanel.complete(result, errorMessage); + return; + } + } else { + for (Long dsID : settings.getSelectedDataSources()) { + try { + selectedDataSources.add(currentCase.getSleuthkitCase().getContentById(dsID)); + } catch (TskCoreException ex) { + result = ReportProgressPanel.ReportStatus.ERROR; + errorMessage = Bundle.DataSourceSummaryReport_failedToCompleteReport(); + logger.log(Level.SEVERE, "Could not get the datasources from the case", ex); + progressPanel.complete(result, errorMessage); + return; + } + } + } + + if (selectedDataSources.isEmpty()) { + result = ReportProgressPanel.ReportStatus.ERROR; + progressPanel.complete(result, Bundle.DataSourceSummaryReport_error_noDataSources()); + logger.log(Level.SEVERE, "No data sources selected for report."); //NON-NLS + return; + } + + // loop over all selected data sources + for (Content dataSource : selectedDataSources){ + if (dataSource instanceof DataSource) { + try { + new ExcelExportAction().exportToXLSX(progressPanel, (DataSource) dataSource, settings.getReportDirectoryPath()); + } catch (IOException | ExcelExport.ExcelExportException ex) { + errorMessage = Bundle.DataSourceSummaryReport_excelFileWriteError(); + logger.log(Level.SEVERE, errorMessage, ex); //NON-NLS + progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, errorMessage); + return; + } + } + } + + 
progressPanel.complete(result, errorMessage); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/DefaultCellModel.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/DefaultCellModel.java new file mode 100755 index 0000000000..c2e2b7d085 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/DefaultCellModel.java @@ -0,0 +1,111 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.util.function.Function; +/** + * The default cell model. + */ +class DefaultCellModel implements CellModel { + + private final T data; + private final String text; + private CellModel.HorizontalAlign horizontalAlignment; + private final String excelFormatString; + + /** + * Main constructor. + * + * @param data The data to be displayed in the cell. + */ + DefaultCellModel(T data) { + this(data, null, null); + } + + /** + * Constructor. + * + * @param data The data to be displayed in the cell. + * @param stringConverter The means of converting that data to a string or + * null to use .toString method on object. + */ + DefaultCellModel(T data, Function stringConverter) { + this(data, stringConverter, null); + } + + /** + * Constructor. 
+ * + * @param data The data to be displayed in the cell. + * @param stringConverter The means of converting that data to a string or + * null to use .toString method on object. + * @param excelFormatString The apache poi excel format string to use with + * the data. + * + * NOTE: Only certain data types can be exported. See + * ExcelTableExport.createCell() for types. + */ + DefaultCellModel(T data, Function stringConverter, String excelFormatString) { + this.data = data; + this.excelFormatString = excelFormatString; + + if (stringConverter == null) { + text = this.data == null ? "" : this.data.toString(); + } else { + text = stringConverter.apply(this.data); + } + } + + @Override + public T getData() { + return this.data; + } + + @Override + public String getText() { + return text; + } + + @Override + public HorizontalAlign getHorizontalAlignment() { + return horizontalAlignment; + } + + @Override + public String getExcelFormatString() { + return this.excelFormatString; + } + + /** + * Sets the horizontal alignment for this cell model. + * + * @param alignment The horizontal alignment for the cell model. + * + * @return As a utility, returns this. 
+ */ + DefaultCellModel setHorizontalAlignment(CellModel.HorizontalAlign alignment) { + this.horizontalAlignment = alignment; + return this; + } + + @Override + public String toString() { + return getText(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelExport.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelExport.java similarity index 91% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelExport.java rename to Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelExport.java index fab6558c4a..47087dd985 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelExport.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelExport.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.datasourcesummary.uiutils; +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; import java.io.File; import java.io.FileOutputStream; @@ -37,24 +37,24 @@ import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.CellModel.HorizontalAlign; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.CellModel.HorizontalAlign; /** * Class for handling Excel exporting. */ -public class ExcelExport { +class ExcelExport { /** * Exception thrown in the event of an excel export issue. */ - public static class ExcelExportException extends Exception { + static class ExcelExportException extends Exception { /** * Constructor. * * @param string The message. */ - public ExcelExportException(String string) { + ExcelExportException(String string) { super(string); } @@ -64,7 +64,7 @@ public class ExcelExport { * @param string The message. 
* @param thrwbl The inner exception. */ - public ExcelExportException(String string, Throwable thrwbl) { + ExcelExportException(String string, Throwable thrwbl) { super(string, thrwbl); } } @@ -153,7 +153,7 @@ public class ExcelExport { /** * Class detailing aspects of the worksheet. */ - public static class WorksheetEnv { + static class WorksheetEnv { private final CellStyle headerStyle; private final Workbook parentWorkbook; @@ -182,7 +182,7 @@ public class ExcelExport { * @param cellStyleKey The key. * @return The cell style representing this key. */ - public CellStyle getCellStyle(CellStyleKey cellStyleKey) { + CellStyle getCellStyle(CellStyleKey cellStyleKey) { return cellStyleCache.computeIfAbsent(cellStyleKey, (pair) -> { CellStyle computed = this.parentWorkbook.createCellStyle(); computed.cloneStyleFrom(cellStyleKey.getCellStyle() == null ? defaultStyle : cellStyleKey.getCellStyle()); @@ -203,7 +203,7 @@ public class ExcelExport { * * @return The cell style to use for headers. */ - public CellStyle getHeaderStyle() { + CellStyle getHeaderStyle() { return headerStyle; } @@ -212,7 +212,7 @@ public class ExcelExport { * * @return The cell style for default items. */ - public CellStyle getDefaultCellStyle() { + CellStyle getDefaultCellStyle() { return defaultStyle; } @@ -221,7 +221,7 @@ public class ExcelExport { * * @return The parent workbook. */ - public Workbook getParentWorkbook() { + Workbook getParentWorkbook() { return parentWorkbook; } } @@ -229,7 +229,7 @@ public class ExcelExport { /** * An item to be exported as a sheet during export. */ - public static interface ExcelSheetExport { + static interface ExcelSheetExport { /** * Returns the name of the sheet to use with this item. @@ -250,23 +250,7 @@ public class ExcelExport { void renderSheet(Sheet sheet, WorksheetEnv env) throws ExcelExportException; } - private static ExcelExport instance = null; - - /** - * Retrieves a singleton instance of this class. - * - * @return The instance. 
- */ - public static ExcelExport getInstance() { - if (instance == null) { - instance = new ExcelExport(); - } - - return instance; - } - private ExcelExport() { - } /** @@ -281,7 +265,7 @@ public class ExcelExport { "# {0} - sheetNumber", "ExcelExport_writeExcel_noSheetName=Sheet {0}" }) - public void writeExcel(List exports, File path) throws IOException, ExcelExportException { + static void writeExcel(List exports, File path) throws IOException, ExcelExportException { // Create a Workbook Workbook workbook = new XSSFWorkbook(); // new HSSFWorkbook() for generating `.xls` file @@ -337,7 +321,7 @@ public class ExcelExport { * @param cellStyle The style to use. * @return The created cell. */ - static Cell createCell(WorksheetEnv env, Row row, int colNum, ExcelCellModel cellModel, Optional cellStyle) { + static Cell createCell(WorksheetEnv env, Row row, int colNum, CellModel cellModel, Optional cellStyle) { CellStyle cellStyleToUse = cellStyle.orElse(env.getDefaultCellStyle()); if (cellModel.getExcelFormatString() != null || cellModel.getHorizontalAlignment() != null) { diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelExportAction.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelExportAction.java new file mode 100644 index 0000000000..f9b1b2b988 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelExportAction.java @@ -0,0 +1,332 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Paths; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.logging.Level; +import org.openide.util.NbBundle; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.coreutils.FileUtil; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.report.ReportProgressPanel; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelExportException; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelSheetExport; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Action that exports tab data to an excel workbook. + */ +@Messages({ + "ExcelExportAction_moduleName=Data Source Summary",}) +class ExcelExportAction { + + private static final Logger logger = Logger.getLogger(ExcelExportAction.class.getName()); + + /** + * Main constructor. + * + * Takes no arguments; the per-tab exporters are instantiated during export. + */ + ExcelExportAction() { + } + + /** + * Generates an xlsx path for the data source summary export. + * + * @param dataSourceName The name of the data source. 
+ * + * @return The file to which the excel document should be written (a + * timestamped path under the report directory; never null). + */ + @NbBundle.Messages({ + "ExcelExportAction_getXLSXPath_directory=DataSourceSummary",}) + File getXLSXPath(String dataSourceName, String baseReportDir) { + // set initial path to reports directory with filename that is + // a combination of the data source name and time stamp + DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); + String fileName = String.format("%s-%s.xlsx", dataSourceName == null ? "" : FileUtil.escapeFileName(dataSourceName), dateFormat.format(new Date())); + File reportsDirFile = Paths.get(baseReportDir, Bundle.ExcelExportAction_getXLSXPath_directory()).toFile(); + if (!reportsDirFile.exists()) { + reportsDirFile.mkdirs(); + } + + return Paths.get(reportsDirFile.getAbsolutePath(), fileName).toFile(); + } + + /** + * Action that handles updating progress and exporting data from the tabs. + * + * @param progressPanel The progress indicator. + * @param dataSource The data source to be exported. + * @param baseReportDir The base report directory into which the excel export is written. 
+ * + * @throws InterruptedException + * @throws IOException + * @throws ExcelExportException + */ + @NbBundle.Messages({ + "ExcelExportAction_exportToXLSX_beginExport=Beginning Export...", + "ExcelExportAction_exportToXLSX_gatheringRecentActivityData=Fetching Recent Activity Data", + "ExcelExportAction_exportToXLSX_gatheringContainerData=Fetching Container & Image Data", + "ExcelExportAction_exportToXLSX_gatheringTimelineData=Fetching Timeline Data", + "ExcelExportAction_exportToXLSX_gatheringFileData=Fetching File and MIME Type Data", + "ExcelExportAction_exportToXLSX_gatheringAnalysisData=Fetching Analysis Data", + "ExcelExportAction_exportToXLSX_gatheringPastData=Fetching Historical Data", + "ExcelExportAction_exportToXLSX_gatheringUserData=Fetching User Activity Data", + "ExcelExportAction_exportToXLSX_gatheringGeoData=Fetching Geolocation Data", + "ExcelExportAction_exportToXLSX_gatheringIngestData=Fetching Ingest History Data", + "ExcelExportAction_exportToXLSX_writingToFile=Writing to File...",}) + + void exportToXLSX(ReportProgressPanel progressPanel, DataSource dataSource, String baseReportDir) + throws IOException, ExcelExport.ExcelExportException { + + File reportFile = getXLSXPath(dataSource.getName(), baseReportDir); + int totalWeight = 11; + int step = 1; + progressPanel.setIndeterminate(false); + progressPanel.setLabels(dataSource.getName(), reportFile.getPath()); + progressPanel.setMaximumProgress(totalWeight); + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_beginExport()); + List sheetExports = new ArrayList<>(); + + // Export file and MIME type data + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringFileData()); + progressPanel.setProgress(step); + List exports = new ExportTypes().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export user activity + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringUserData()); + 
progressPanel.setProgress(++step); + exports = new ExportUserActivity().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export Recent Activity data + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringRecentActivityData()); + progressPanel.setProgress(++step); + exports = new ExportRecentFiles().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export hash set hits, keyword hits, and interesting item hits + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringAnalysisData()); + progressPanel.setProgress(++step); + exports = new ExportAnalysisResults().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export past cases data + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringPastData()); + progressPanel.setProgress(++step); + exports = new ExportPastCases().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export geolocation data + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringGeoData()); + progressPanel.setProgress(++step); + exports = new ExportGeolocation().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export Timeline data + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringTimelineData()); + progressPanel.setProgress(++step); + exports = new ExportTimeline().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export ingest history + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringIngestData()); + progressPanel.setProgress(++step); + exports = ExportIngestHistory.getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + // Export Container & Image info data + 
progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_gatheringContainerData()); + progressPanel.setProgress(++step); + exports = new ExportContainerInfo().getExports(dataSource); + if (exports != null) { + sheetExports.addAll(exports); + } + + progressPanel.updateStatusLabel(Bundle.ExcelExportAction_exportToXLSX_writingToFile()); + progressPanel.setProgress(++step); + ExcelExport.writeExcel(sheetExports, reportFile); + + try { + // add to reports + Case curCase = Case.getCurrentCaseThrows(); + curCase.addReport(reportFile.getParent(), + Bundle.ExcelExportAction_moduleName(), + reportFile.getName(), + dataSource); + } catch (NoCurrentCaseException | TskCoreException ex) { + logger.log(Level.WARNING, "There was an error attaching report to case.", ex); + } + } + + /** + * Function that converts data into a excel sheet data. + */ + protected interface ExcelExportFunction { + + /** + * Function that converts data into an excel sheet. + * + * @param data The data. + * + * @return The excel sheet export. + * + * @throws ExcelExportException + */ + ExcelSheetExport convert(T data) throws ExcelExportException; + } + + /** + * Runs a data fetcher and returns the result handling any possible errors + * with a log message. + * + * @param dataFetcher The means of fetching the data. + * @param sheetName The name of the sheet. + * @param ds The data source. + * + * @return The fetched data. + */ + protected static T getFetchResult( + DataFetcher dataFetcher, + String sheetName, DataSource ds) { + + try { + return dataFetcher.runQuery(ds); + } catch (Exception ex) { + logger.log(Level.WARNING, + String.format("There was an error while acquiring data for exporting worksheet(s): '%s' for dataSource: %s", + sheetName == null ? "" : sheetName, + ds == null || ds.getName() == null ? "" : ds.getName()), ex); + return null; + } + } + + /** + * Helper method that converts data into an excel sheet export handling + * possible excel exceptions. 
+ * + * @param excelConverter Function to convert data to an excel sheet export. + * @param data The data. If data is null, null will be returned. + * @param sheetName The name(s) of the sheet (to be used in the error + * message). + * + * @return The excel sheet export. + */ + protected static ExcelSheetExport convertToExcel(ExcelExportFunction excelConverter, T data, String sheetName) { + if (data == null) { + return null; + } + + try { + return excelConverter.convert(data); + } catch (ExcelExportException ex) { + logger.log(Level.WARNING, + String.format("There was an error while preparing export of worksheet(s): '%s'", + sheetName == null ? "" : sheetName), ex); + return null; + } + } + + /** + * Returns an excel sheet export given the fetching of data or null if no + * export created. + * + * @param dataFetcher The means of fetching data. + * @param excelConverter The means of converting data to excel. + * @param sheetName The name of the sheet (for error handling + * reporting). + * @param ds The data source to use for fetching data. + * + * @return The excel sheet export or null if no export could be generated. + */ + protected static ExcelSheetExport getExport( + DataFetcher dataFetcher, ExcelExportFunction excelConverter, + String sheetName, DataSource ds) { + + T data = getFetchResult(dataFetcher, sheetName, ds); + return convertToExcel(excelConverter, data, sheetName); + } + + /** + * Returns an excel table export of the data or null if no export created. + * + * @param columnsModel The model for the columns. + * @param sheetName The name for the sheet. + * @param data The data to be exported. + * + * @return The excel table export or null if no export could be generated. 
+ */ + protected static ExcelSheetExport getTableExport(List> columnsModel, + String sheetName, List data) { + + return convertToExcel((dataList) -> new ExcelTableExport<>(sheetName, columnsModel, dataList), + data, + sheetName); + } + + /** + * Returns an excel table export of the data or null if no export created. + * + * @param dataFetcher The means of fetching data for the data source and + * the export. + * @param columnsModel The model for the columns. + * @param sheetName The name for the sheet. + * @param ds The data source. + * + * @return The excel export or null if no export created. + */ + protected static ExcelSheetExport getTableExport( + DataFetcher> dataFetcher, List> columnsModel, + String sheetName, DataSource ds) { + + return getExport(dataFetcher, + (dataList) -> new ExcelTableExport<>(sheetName, columnsModel, dataList), + sheetName, + ds); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelSpecialFormatExport.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelSpecialFormatExport.java similarity index 84% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelSpecialFormatExport.java rename to Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelSpecialFormatExport.java index f9c46fa5e2..cd30e7d816 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelSpecialFormatExport.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelSpecialFormatExport.java @@ -16,25 +16,25 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.datasourcesummary.uiutils; +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; import java.util.Collections; import java.util.List; import java.util.Optional; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelExportException; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelExportException; /** * An excel export that has special row-by-row formatting. */ -public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { +class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { /** * The dimensions consumed by an item in an ExcelSpecialFormatExport list of * items to be rendered. */ - public static class ItemDimensions { + static class ItemDimensions { private final int rowStart; private final int rowEnd; @@ -49,7 +49,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * @param rowEnd The last excel row of the the item. * @param colEnd The last excel column of the item. */ - public ItemDimensions(int rowStart, int colStart, int rowEnd, int colEnd) { + ItemDimensions(int rowStart, int colStart, int rowEnd, int colEnd) { this.rowStart = rowStart; this.colStart = colStart; this.rowEnd = rowEnd; @@ -59,28 +59,28 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { /** * @return The starting excel row of the item. */ - public int getRowStart() { + int getRowStart() { return rowStart; } /** * @return The last excel row of the the item. */ - public int getRowEnd() { + int getRowEnd() { return rowEnd; } /** * @return The starting excel column of the item. */ - public int getColStart() { + int getColStart() { return colStart; } /** * @return The last excel column of the item. 
*/ - public int getColEnd() { + int getColEnd() { return colEnd; } } @@ -88,7 +88,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { /** * An item to be exported in a specially formatted excel export. */ - public interface ExcelItemExportable { + interface ExcelItemExportable { /** * Writes the item to the sheet in the special format export sheet. @@ -106,16 +106,16 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { /** * Writes a string to a single cell in a specially formatted excel export. */ - public static class SingleCellExportable implements ExcelItemExportable { + static class SingleCellExportable implements ExcelItemExportable { - private final ExcelCellModel item; + private final CellModel item; /** * Main constructor. * * @param key The text to be written. */ - public SingleCellExportable(String key) { + SingleCellExportable(String key) { this(new DefaultCellModel<>(key)); } @@ -124,7 +124,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * * @param item The cell model to be written. */ - public SingleCellExportable(ExcelCellModel item) { + SingleCellExportable(CellModel item) { this.item = item; } @@ -140,10 +140,10 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * Writes a row consisting of first column as a key and second column as a * value. */ - public static class KeyValueItemExportable implements ExcelItemExportable { + static class KeyValueItemExportable implements ExcelItemExportable { - private final ExcelCellModel key; - private final ExcelCellModel value; + private final CellModel key; + private final CellModel value; /** * Main constructor. @@ -151,7 +151,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * @param key The string key to be exported. * @param value The cell model to be exported. 
*/ - public KeyValueItemExportable(String key, ExcelCellModel value) { + KeyValueItemExportable(String key, CellModel value) { this(new DefaultCellModel<>(key), value); } @@ -161,7 +161,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * @param key The cell key to be exported. * @param value The cell model to be exported. */ - public KeyValueItemExportable(ExcelCellModel key, ExcelCellModel value) { + KeyValueItemExportable(CellModel key, CellModel value) { this.key = key; this.value = value; } @@ -186,7 +186,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * item 2 * */ - public static class TitledExportable implements ExcelItemExportable { + static class TitledExportable implements ExcelItemExportable { private static final int DEFAULT_INDENT = 1; @@ -199,7 +199,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * @param title The title for the export. * @param children The children to be indented and enumerated. */ - public TitledExportable(String title, List children) { + TitledExportable(String title, List children) { this.title = title; this.children = children; } @@ -232,7 +232,7 @@ public class ExcelSpecialFormatExport implements ExcelExport.ExcelSheetExport { * @param sheetName The name of the sheet. * @param exports The row-by-row items to be exported. */ - public ExcelSpecialFormatExport(String sheetName, List exports) { + ExcelSpecialFormatExport(String sheetName, List exports) { this.sheetName = sheetName; this.exports = exports == null ? 
Collections.emptyList() : exports; } diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelTableExport.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelTableExport.java similarity index 81% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelTableExport.java rename to Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelTableExport.java index c79cb381aa..b31c7da507 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/ExcelTableExport.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExcelTableExport.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.datasourcesummary.uiutils; +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; import java.util.Collections; import java.util.List; @@ -24,15 +24,15 @@ import java.util.Optional; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelExportException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.ExcelItemExportable; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.ItemDimensions; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelExportException; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelSheetExport; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.ExcelItemExportable; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.ItemDimensions; /** * An excel sheet export of table data. 
*/ -public class ExcelTableExport implements ExcelSheetExport, ExcelItemExportable { +class ExcelTableExport implements ExcelSheetExport, ExcelItemExportable { private final String sheetName; private final List> columns; @@ -47,7 +47,7 @@ public class ExcelTableExport implements ExcelSheet * @param columns The columns of the table. * @param data The data to export. */ - public ExcelTableExport(String sheetName, List> columns, List data) { + ExcelTableExport(String sheetName, List> columns, List data) { this(sheetName, columns, data, 0); } @@ -60,7 +60,7 @@ public class ExcelTableExport implements ExcelSheet * @param data The data to export. * @param columnIndent The column indent. */ - public ExcelTableExport(String sheetName, List> columns, List data, int columnIndent) { + ExcelTableExport(String sheetName, List> columns, List data, int columnIndent) { this.sheetName = sheetName; this.columns = columns; this.data = data; @@ -104,7 +104,7 @@ public class ExcelTableExport implements ExcelSheet * @throws ExcelExportException * @return The number of rows (including the header) written. 
*/ - private static int renderSheet( + private static int renderSheet( Sheet sheet, ExcelExport.WorksheetEnv worksheetEnv, int rowStart, @@ -127,8 +127,8 @@ public class ExcelTableExport implements ExcelSheet T rowData = safeData.get(rowNum); Row row = sheet.createRow(rowNum + rowStart + 1); for (int colNum = 0; colNum < columns.size(); colNum++) { - ColumnModel colModel = columns.get(colNum); - ExcelCellModel cellModel = colModel.getCellRenderer().apply(rowData); + ColumnModel colModel = columns.get(colNum); + CellModel cellModel = colModel.getCellRenderer().apply(rowData); ExcelExport.createCell(worksheetEnv, row, colNum + colStart, cellModel, Optional.empty()); } } diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportAnalysisResults.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportAnalysisResults.java new file mode 100755 index 0000000000..57786f1f8e --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportAnalysisResults.java @@ -0,0 +1,79 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.AnalysisSummary; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelSheetExport; +import static org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExportAction.getTableExport; +import org.sleuthkit.datamodel.DataSource; + +/** + * Class to export data hash set hits, keyword hits, and interesting item hits + * within a datasource. + */ +@Messages({ + "ExportAnalysisResults_keyColumn_title=Name", + "ExportAnalysisResults_countColumn_title=Count", + "ExportAnalysisResults_keywordSearchModuleName=Keyword Search", + "ExportAnalysisResults_hashsetHits_tabName=Hashset Hits", + "ExportAnalysisResults_keywordHits_tabName=Keyword Hits", + "ExportAnalysisResults_interestingItemHits_tabName=Interesting Item Hits",}) +class ExportAnalysisResults { + + // Default Column definitions for each table + private static final List, DefaultCellModel>> DEFAULT_COLUMNS = Arrays.asList( + new ColumnModel<>( + Bundle.ExportAnalysisResults_keyColumn_title(), + (pair) -> new DefaultCellModel<>(pair.getKey()), + 300 + ), + new ColumnModel<>( + Bundle.ExportAnalysisResults_countColumn_title(), + (pair) -> new DefaultCellModel<>(pair.getValue()), + 100 + ) + ); + + private final AnalysisSummary analysisSummary; + + ExportAnalysisResults() { + analysisSummary = new AnalysisSummary(); + } + + List getExports(DataSource dataSource) { + + DataFetcher>> hashsetsFetcher = (ds) -> analysisSummary.getHashsetCounts(ds); + DataFetcher>> keywordsFetcher = (ds) -> analysisSummary.getKeywordCounts(ds); + DataFetcher>> interestingItemsFetcher = (ds) -> 
analysisSummary.getInterestingItemCounts(ds); + + return Stream.of( + getTableExport(hashsetsFetcher, DEFAULT_COLUMNS, Bundle.ExportAnalysisResults_hashsetHits_tabName(), dataSource), + getTableExport(keywordsFetcher, DEFAULT_COLUMNS, Bundle.ExportAnalysisResults_keywordHits_tabName(), dataSource), + getTableExport(interestingItemsFetcher, DEFAULT_COLUMNS, Bundle.ExportAnalysisResults_interestingItemHits_tabName(), dataSource)) + .filter(sheet -> sheet != null) + .collect(Collectors.toList()); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportContainerInfo.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportContainerInfo.java new file mode 100755 index 0000000000..44ede9064c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportContainerInfo.java @@ -0,0 +1,130 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang.StringUtils; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary.ContainerDetails; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary.ImageDetails; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelSheetExport; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.ExcelItemExportable; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.KeyValueItemExportable; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.SingleCellExportable; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.TitledExportable; +import org.sleuthkit.datamodel.DataSource; + +/** + * Class to export additional details associated with a specific DataSource + */ +class ExportContainerInfo { + + private final ContainerSummary containerSummary; + + /** + * Creates new form ExportContainerInfo. + */ + ExportContainerInfo() { + containerSummary = new ContainerSummary(); + } + + /** + * Divides acquisition details into key/value pairs to be displayed in + * separate cells in an excel export. + * + * @param acquisitionDetails The acquisition details. + * @return The list of key value pairs that can be incorporated into the + * excel export. 
+ */ + private static List getAcquisitionDetails(String acquisitionDetails) { + if (StringUtils.isBlank(acquisitionDetails)) { + return Collections.emptyList(); + } else { + return Stream.of(acquisitionDetails.split("\\r?\\n")) + .map((line) -> (StringUtils.isBlank(line)) ? null : new SingleCellExportable(line)) + .filter(item -> item != null) + .collect(Collectors.toList()); + } + } + + @Messages({ + "ExportContainerInfo_setFieldsForNonImageDataSource_na=N/A", + "ExportContainerInfo_tabName=Container", + "ExportContainerInfo_export_displayName=Display Name:", + "ExportContainerInfo_export_originalName=Name:", + "ExportContainerInfo_export_deviceId=Device ID:", + "ExportContainerInfo_export_timeZone=Time Zone:", + "ExportContainerInfo_export_acquisitionDetails=Acquisition Details:", + "ExportContainerInfo_export_imageType=Image Type:", + "ExportContainerInfo_export_size=Size:", + "ExportContainerInfo_export_sectorSize=Sector Size:", + "ExportContainerInfo_export_md5=MD5:", + "ExportContainerInfo_export_sha1=SHA1:", + "ExportContainerInfo_export_sha256=SHA256:", + "ExportContainerInfo_export_unallocatedSize=Unallocated Space:", + "ExportContainerInfo_export_filePaths=File Paths:",}) + List getExports(DataSource ds) { + DataFetcher containerDataFetcher = (dataSource) -> containerSummary.getContainerDetails(dataSource); + ContainerDetails containerDetails = ExcelExportAction.getFetchResult(containerDataFetcher, "Container sheets", ds); + if (ds == null || containerDetails == null) { + return Collections.emptyList(); + } + + String NA = Bundle.ExportContainerInfo_setFieldsForNonImageDataSource_na(); + DefaultCellModel NACell = new DefaultCellModel<>(NA); + + ImageDetails imageDetails = containerDetails.getImageDetails(); + boolean hasImage = imageDetails != null; + + DefaultCellModel timeZone = hasImage ? new DefaultCellModel<>(imageDetails.getTimeZone()) : NACell; + DefaultCellModel imageType = hasImage ? 
new DefaultCellModel<>(imageDetails.getImageType()) : NACell; + DefaultCellModel size = hasImage ? SizeRepresentationUtil.getBytesCell(imageDetails.getSize()) : NACell; + DefaultCellModel sectorSize = hasImage ? SizeRepresentationUtil.getBytesCell(imageDetails.getSectorSize()) : NACell; + DefaultCellModel md5 = hasImage ? new DefaultCellModel<>(imageDetails.getMd5Hash()) : NACell; + DefaultCellModel sha1 = hasImage ? new DefaultCellModel<>(imageDetails.getSha1Hash()) : NACell; + DefaultCellModel sha256 = hasImage ? new DefaultCellModel<>(imageDetails.getSha256Hash()) : NACell; + DefaultCellModel unallocatedSize = hasImage ? SizeRepresentationUtil.getBytesCell(imageDetails.getUnallocatedSize()) : NACell; + List paths = containerDetails.getImageDetails() == null ? Collections.singletonList(NA) : containerDetails.getImageDetails().getPaths(); + List cellPaths = paths.stream() + .map(SingleCellExportable::new) + .collect(Collectors.toList()); + + return Arrays.asList(new ExcelSpecialFormatExport(Bundle.ExportContainerInfo_tabName(), Arrays.asList(new KeyValueItemExportable(Bundle.ExportContainerInfo_export_displayName(), new DefaultCellModel<>(containerDetails.getDisplayName())), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_originalName(), new DefaultCellModel<>(containerDetails.getOriginalName())), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_deviceId(), new DefaultCellModel<>(containerDetails.getDeviceId())), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_timeZone(), timeZone), + new TitledExportable(Bundle.ExportContainerInfo_export_acquisitionDetails(), getAcquisitionDetails(containerDetails.getAcquisitionDetails())), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_imageType(), imageType), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_size(), size), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_sectorSize(), sectorSize), + new 
KeyValueItemExportable(Bundle.ExportContainerInfo_export_md5(), md5), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_sha1(), sha1), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_sha256(), sha256), + new KeyValueItemExportable(Bundle.ExportContainerInfo_export_unallocatedSize(), unallocatedSize), + new TitledExportable(Bundle.ExportContainerInfo_export_filePaths(), cellPaths) + ))); + + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportGeolocation.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportGeolocation.java new file mode 100755 index 0000000000..a0a6b0874c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportGeolocation.java @@ -0,0 +1,227 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary.CityCountsList; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary.CityData; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary.CityRecordCount; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.CityRecord; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.GeolocationSummary; +import static org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExportAction.getFetchResult; +import static org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExportAction.getTableExport; +import org.sleuthkit.datamodel.DataSource; + +/** + * Class to export information about a data source's geolocation data. + */ +@Messages({ + "ExportGeolocation_cityColumn_title=Closest City", + "ExportGeolocation_countColumn_title=Count", + "ExportGeolocation_unknownRow_title=Unknown", + "ExportGeolocation_mostCommon_tabName=Most Common Cities", + "ExportGeolocation_mostRecent_tabName=Most Recent Cities",}) +class ExportGeolocation { + + private final GeolocationSummary geoSummary; + + /** + * Object encapsulating geolocation data. + */ + private static class GeolocationData { + + private final List> mostRecentData; + private final List> mostCommonData; + + /** + * Main constructor. + * + * @param mostRecentData The data to be displayed in the most recent + * tab. + * @param mostCommonData The data to be displayed in the most common + * tab. 
+ */ + GeolocationData(List> mostRecentData, List> mostCommonData) { + this.mostRecentData = mostRecentData; + this.mostCommonData = mostCommonData; + } + + /** + * Returns the data to be displayed in the most recent tab. + * + * @return The data to be displayed in the most recent tab. + */ + List> getMostRecentData() { + return mostRecentData; + } + + /** + * Returns the data to be displayed in the most common tab. + * + * @return The data to be displayed in the most common tab. + */ + List> getMostCommonData() { + return mostCommonData; + } + } + + private static final int DAYS_COUNT = 30; + private static final int MAX_COUNT = 10; + + // The column indicating the city + private static final ColumnModel, DefaultCellModel> CITY_COL = new ColumnModel<>( + Bundle.ExportGeolocation_cityColumn_title(), + (pair) -> new DefaultCellModel<>(pair.getLeft()), + 300 + ); + + // The column indicating the count of points seen close to that city + private static final ColumnModel, DefaultCellModel> COUNT_COL = new ColumnModel<>( + Bundle.ExportGeolocation_countColumn_title(), + (pair) -> new DefaultCellModel<>(pair.getRight()), + 100 + ); + + private static final List, DefaultCellModel>> DEFAULT_TEMPLATE = Arrays.asList( + CITY_COL, + COUNT_COL + ); + + ExportGeolocation() { + geoSummary = new GeolocationSummary(); + } + + /** + * Retrieves the city name to display from the record. + * + * @param record The record for the city to display. + * + * @return The display name (city, country). 
+ */ + private static String getCityName(CityRecord record) { + if (record == null) { + return null; + } + + List cityIdentifiers = Stream.of(record.getCityName(), record.getState(), record.getCountry()) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); + + if (cityIdentifiers.size() == 1) { + return cityIdentifiers.get(0); + } else if (cityIdentifiers.size() == 2) { + return String.format("%s, %s", cityIdentifiers.get(0), cityIdentifiers.get(1)); + } else if (cityIdentifiers.size() >= 3) { + return String.format("%s, %s; %s", cityIdentifiers.get(0), cityIdentifiers.get(1), cityIdentifiers.get(2)); + } + + return null; + } + + /** + * Formats one record to be displayed as a row in the tab (specifically, + * formats the city name). + * + * @param cityCount The CityRecordCount representing a row. + * + * @return The city/count pair to be displayed as a row. + */ + private static Pair formatRecord(CityRecordCount cityCount) { + if (cityCount == null) { + return null; + } + + String cityName = getCityName(cityCount.getCityRecord()); + int count = cityCount.getCount(); + return Pair.of(cityName, count); + } + + /** + * Formats a list of records to be displayed in a tab (specifically, + * includes the count of points where no closest city could be determined as + * 'unknown'). + * + * @param countsList The CityCountsList object representing the data to be + * displayed in the tab. + * + * @return The list of city/count tuples to be displayed as a row. + */ + private static List> formatList(CityCountsList countsList) { + if (countsList == null) { + return Collections.emptyList(); + } + + Stream countsStream = ((countsList.getCounts() == null) + ? 
new ArrayList() + : countsList.getCounts()).stream(); + + Stream> pairStream = countsStream.map((r) -> formatRecord(r)); + + Pair unknownRecord = Pair.of(Bundle.ExportGeolocation_unknownRow_title(), countsList.getOtherCount()); + + return Stream.concat(pairStream, Stream.of(unknownRecord)) + .filter((p) -> p != null && p.getRight() != null && p.getRight() > 0) + .sorted((a, b) -> -Integer.compare(a.getRight(), b.getRight())) + .limit(MAX_COUNT) + .collect(Collectors.toList()); + } + + /** + * Converts CityData from GeolocationSummaryGetter into data that can be + * directly put into tab in this panel. + * + * @param cityData The city data. + * + * @return The geolocation data. + */ + private static GeolocationData convertToViewModel(CityData cityData) { + if (cityData == null) { + return new GeolocationData(Collections.emptyList(), Collections.emptyList()); + } else { + return new GeolocationData(formatList(cityData.getMostRecent()), formatList(cityData.getMostCommon())); + } + } + + List getExports(DataSource dataSource) { + + DataFetcher geolocationFetcher = (ds) -> convertToViewModel(geoSummary.getCityCounts(ds, DAYS_COUNT, MAX_COUNT)); + + GeolocationData model + = getFetchResult(geolocationFetcher, "Geolocation sheets", dataSource); + if (model == null) { + return Collections.emptyList(); + } + + return Arrays.asList(getTableExport(DEFAULT_TEMPLATE, + Bundle.ExportGeolocation_mostRecent_tabName(), model.getMostRecentData()), + getTableExport(DEFAULT_TEMPLATE, + Bundle.ExportGeolocation_mostCommon_tabName(), model.getMostCommonData()) + ); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/IngestJobExcelExport.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportIngestHistory.java similarity index 83% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/IngestJobExcelExport.java rename to Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportIngestHistory.java index 
3ad757e7be..4c355f10aa 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/ui/IngestJobExcelExport.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportIngestHistory.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.datasourcesummary.ui; +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; import java.text.DateFormat; import java.text.SimpleDateFormat; @@ -34,27 +34,24 @@ import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ColumnModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.DefaultCellModel; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelTableExport; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelSheetExport; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.IngestJobInfo; import org.sleuthkit.datamodel.IngestModuleInfo; import org.sleuthkit.datamodel.TskCoreException; /** - * Class that handles exporting information in IngestJobInfoPanel to excel. + * Class that handles exporting ingest job information to excel. 
*/ @Messages({ - "IngestJobExcelExport_startTimeColumn=Start Time", - "IngestJobExcelExport_endTimeColumn=End Time", - "IngestJobExcelExport_ingestStatusTimeColumn=Ingest Status", - "IngestJobExcelExport_moduleNameTimeColumn=Module Name", - "IngestJobExcelExport_versionColumn=Module Version", - "IngestJobExcelExport_sheetName=Ingest History" + "ExportIngestHistory_startTimeColumn=Start Time", + "ExportIngestHistory_endTimeColumn=End Time", + "ExportIngestHistory_ingestStatusTimeColumn=Ingest Status", + "ExportIngestHistory_moduleNameTimeColumn=Module Name", + "ExportIngestHistory_versionColumn=Module Version", + "ExportIngestHistory_sheetName=Ingest History" }) -class IngestJobExcelExport { +class ExportIngestHistory { /** * An entry to display in an excel export. @@ -70,10 +67,10 @@ class IngestJobExcelExport { /** * Main constructor. * - * @param startTime The ingest start time. - * @param endTime The ingest stop time. - * @param status The ingest status. - * @param ingestModule The ingest module. + * @param startTime The ingest start time. + * @param endTime The ingest stop time. + * @param status The ingest status. + * @param ingestModule The ingest module. * @param ingestModuleVersion The ingest module version. */ IngestJobEntry(Date startTime, Date endTime, String status, String ingestModule, String ingestModuleVersion) { @@ -120,26 +117,26 @@ class IngestJobExcelExport { } } - private static final Logger logger = Logger.getLogger(IngestJobExcelExport.class.getName()); + private static final Logger logger = Logger.getLogger(ExportIngestHistory.class.getName()); private static final String DATETIME_FORMAT_STR = "yyyy/MM/dd HH:mm:ss"; private static final DateFormat DATETIME_FORMAT = new SimpleDateFormat(DATETIME_FORMAT_STR, Locale.getDefault()); // columns in the excel export table to be created. 
private static final List>> COLUMNS = Arrays.asList( new ColumnModel<>( - Bundle.IngestJobExcelExport_startTimeColumn(), + Bundle.ExportIngestHistory_startTimeColumn(), (entry) -> getDateCell(entry.getStartTime())), new ColumnModel<>( - Bundle.IngestJobExcelExport_endTimeColumn(), + Bundle.ExportIngestHistory_endTimeColumn(), (entry) -> getDateCell(entry.getEndTime())), new ColumnModel<>( - Bundle.IngestJobExcelExport_ingestStatusTimeColumn(), + Bundle.ExportIngestHistory_ingestStatusTimeColumn(), (entry) -> new DefaultCellModel<>(entry.getStatus())), new ColumnModel<>( - Bundle.IngestJobExcelExport_moduleNameTimeColumn(), + Bundle.ExportIngestHistory_moduleNameTimeColumn(), (entry) -> new DefaultCellModel<>(entry.getIngestModule())), new ColumnModel<>( - Bundle.IngestJobExcelExport_versionColumn(), + Bundle.ExportIngestHistory_versionColumn(), (entry) -> new DefaultCellModel<>(entry.getIngestModuleVersion())) ); @@ -147,6 +144,7 @@ class IngestJobExcelExport { * Retrieves data for a date cell. * * @param date The date. + * * @return The data cell to be used in the excel export. */ private static DefaultCellModel getDateCell(Date date) { @@ -158,6 +156,7 @@ class IngestJobExcelExport { * Retrieves all the ingest job modules and versions for a job. * * @param job The ingest job. + * * @return All of the corresponding entries sorted by module name. */ private static List getEntries(IngestJobInfo job) { @@ -190,6 +189,7 @@ class IngestJobExcelExport { * to null. * * @param list The list of entries for an ingest job. + * * @return The stream of entries to be displayed. */ private static Stream showFirstRowOnly(List list) { @@ -209,6 +209,7 @@ class IngestJobExcelExport { * Returns a list of sheets to be exported for the Ingest History tab. * * @param dataSource The data source. + * * @return The list of sheets to be included in an export. 
*/ static List getExports(DataSource dataSource) { @@ -245,9 +246,9 @@ class IngestJobExcelExport { .filter(item -> item != null) .collect(Collectors.toList()); - return Arrays.asList(new ExcelTableExport<>(Bundle.IngestJobExcelExport_sheetName(), COLUMNS, toDisplay)); + return Arrays.asList(new ExcelTableExport<>(Bundle.ExportIngestHistory_sheetName(), COLUMNS, toDisplay)); } - private IngestJobExcelExport() { + private ExportIngestHistory() { } } diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportPastCases.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportPastCases.java new file mode 100755 index 0000000000..be824477a1 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportPastCases.java @@ -0,0 +1,80 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.apache.commons.lang3.tuple.Pair; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.PastCasesSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.PastCasesSummary.PastCasesResult; +import static org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExportAction.getFetchResult; +import static org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExportAction.getTableExport; +import org.sleuthkit.datamodel.DataSource; + +/** + * Class to export information about a datasource and how it pertains to other + * cases. + */ +@Messages({ + "ExportPastCases_caseColumn_title=Case", + "ExportPastCases_countColumn_title=Count", + "ExportPastCases_notableFileTable_tabName=Cases with Common Notable", + "ExportPastCases_sameIdsTable_tabName=Past Cases with the Same Devices",}) +class ExportPastCases { + + private final PastCasesSummary pastSummary; + + // model for column indicating the case + private static final ColumnModel, DefaultCellModel> CASE_COL = new ColumnModel<>( + Bundle.ExportPastCases_caseColumn_title(), + (pair) -> new DefaultCellModel<>(pair.getKey()), + 300 + ); + + // model for column indicating the count + private static final ColumnModel, DefaultCellModel> COUNT_COL = new ColumnModel<>( + Bundle.ExportPastCases_countColumn_title(), + (pair) -> new DefaultCellModel<>(pair.getValue()), + 100 + ); + + // the template for columns in both tables in this tab + private static List, DefaultCellModel>> DEFAULT_TEMPLATE + = Arrays.asList(CASE_COL, COUNT_COL); + + ExportPastCases() { + pastSummary = new PastCasesSummary(); + } + + List getExports(DataSource dataSource) { + DataFetcher pastCasesFetcher = (ds) -> pastSummary.getPastCasesData(ds); + 
PastCasesResult result = getFetchResult(pastCasesFetcher, "Past cases sheets", dataSource); + if (result == null) { + return Collections.emptyList(); + } + + return Arrays.asList( + getTableExport(DEFAULT_TEMPLATE, Bundle.ExportPastCases_notableFileTable_tabName(), result.getTaggedNotable()), + getTableExport(DEFAULT_TEMPLATE, Bundle.ExportPastCases_sameIdsTable_tabName(), result.getSameIdsResults()) + ); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportRecentFiles.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportRecentFiles.java new file mode 100755 index 0000000000..a7d5471343 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportRecentFiles.java @@ -0,0 +1,123 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Locale; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentAttachmentDetails; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentDownloadDetails; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.RecentFilesSummary.RecentFileDetails; +import org.sleuthkit.datamodel.DataSource; + +/** + * Class to export data source summary info. + */ +@Messages({ + "ExportRecentFiles_docsTable_tabName=Recently Opened Documents", + "ExportRecentFiles_downloadsTable_tabName=Recently Downloads", + "ExportRecentFiles_attachmentsTable_tabName=Recent Attachments", + "ExportRecentFiles_col_head_date=Date", + "ExportRecentFiles_col_header_domain=Domain", + "ExportRecentFiles_col_header_path=Path", + "ExportRecentFiles_col_header_sender=Sender" +}) +final class ExportRecentFiles { + + private final RecentFilesSummary recentSummary; + + private static final String DATETIME_FORMAT_STR = "yyyy/MM/dd HH:mm:ss"; + private static final DateFormat DATETIME_FORMAT = new SimpleDateFormat(DATETIME_FORMAT_STR, Locale.getDefault()); + + private static final List>> docsTemplate = Arrays.asList( + new ColumnModel<>(Bundle.ExportRecentFiles_col_header_path(), + (prog) -> { + return new DefaultCellModel<>(prog.getPath()); + }, 250), + new ColumnModel<>(Bundle.ExportRecentFiles_col_head_date(), + getDateFunct(), + 80)); + + private static final List>> downloadsTemplate = Arrays.asList( + new 
ColumnModel<>(Bundle.ExportRecentFiles_col_header_domain(), + (prog) -> { + return new DefaultCellModel<>(prog.getWebDomain()); + }, 100), + new ColumnModel<>(Bundle.ExportRecentFiles_col_header_path(), + (prog) -> { + return new DefaultCellModel<>(prog.getPath()); + }, 250), + new ColumnModel<>(Bundle.ExportRecentFiles_col_head_date(), + getDateFunct(), + 80)); + + private static final List>> attachmentsTemplate = Arrays.asList( + new ColumnModel<>(Bundle.ExportRecentFiles_col_header_path(), + (prog) -> { + return new DefaultCellModel<>(prog.getPath()); + }, 250), + new ColumnModel<>(Bundle.ExportRecentFiles_col_head_date(), + getDateFunct(), + 80), + new ColumnModel<>(Bundle.ExportRecentFiles_col_header_sender(), + (prog) -> { + return new DefaultCellModel<>(prog.getSender()); + }, 150)); + + ExportRecentFiles() { + recentSummary = new RecentFilesSummary(); + } + + /** + * Returns a function that gets the date from the RecentFileDetails object + * and converts into a DefaultCellModel to be displayed in a table. + * + * @return The function that determines the date cell from a + * RecentFileDetails object. + */ + private static Function> getDateFunct() { + return (T lastAccessed) -> { + Function dateParser = (dt) -> dt == null ? 
"" : DATETIME_FORMAT.format(dt); + return new DefaultCellModel<>(new Date(lastAccessed.getDateAsLong() * 1000), dateParser, DATETIME_FORMAT_STR); + }; + } + + List getExports(DataSource dataSource) { + + DataFetcher> docsFetcher = (ds) -> recentSummary.getRecentlyOpenedDocuments(ds, 10); + DataFetcher> downloadsFetcher = (ds) -> recentSummary.getRecentDownloads(ds, 10); + DataFetcher> attachmentsFetcher = (ds) -> recentSummary.getRecentAttachments(ds, 10); + + return Stream.of( + ExcelExportAction.getTableExport(docsFetcher, docsTemplate, Bundle.ExportRecentFiles_docsTable_tabName(), dataSource), + ExcelExportAction.getTableExport(downloadsFetcher, downloadsTemplate, Bundle.ExportRecentFiles_downloadsTable_tabName(), dataSource), + ExcelExportAction.getTableExport(attachmentsFetcher, attachmentsTemplate, Bundle.ExportRecentFiles_attachmentsTable_tabName(), dataSource)) + .filter(sheet -> sheet != null) + .collect(Collectors.toList()); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportTimeline.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportTimeline.java new file mode 100755 index 0000000000..cbb385b60a --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportTimeline.java @@ -0,0 +1,144 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.awt.Color; +import java.text.DateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import org.apache.commons.collections.CollectionUtils; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries.BarChartItem; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.BarChartSeries.OrderedKey; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary.DailyActivityAmount; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TimelineSummary.TimelineSummaryData; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.KeyValueItemExportable; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.TitledExportable; +import org.sleuthkit.datamodel.DataSource; + +/** + * Class to export information about a data source's timeline events. 
+ */ +@Messages({ + "TimelinePanel_earliestLabel_title=Earliest", + "TimelinePanel_latestLabel_title=Latest", + "TimlinePanel_last30DaysChart_title=Last 30 Days", + "TimlinePanel_last30DaysChart_fileEvts_title=File Events", + "TimlinePanel_last30DaysChart_artifactEvts_title=Result Events",}) +class ExportTimeline { + + private final TimelineSummary timelineSummary; + + private static final String EARLIEST_LATEST_FORMAT_STR = "MMM d, yyyy"; + private static final DateFormat EARLIEST_LATEST_FORMAT = TimelineSummary.getUtcFormat(EARLIEST_LATEST_FORMAT_STR); + private static final DateFormat CHART_FORMAT = TimelineSummary.getUtcFormat("MMM d, yyyy"); + private static final int MOST_RECENT_DAYS_COUNT = 30; + + private static final Color FILE_EVT_COLOR = new Color(228, 22, 28); + private static final Color ARTIFACT_EVT_COLOR = new Color(21, 227, 100); + + /** + * Creates new form PastCasesPanel + */ + ExportTimeline() { + timelineSummary = new TimelineSummary(); + } + + /** + * Converts DailyActivityAmount data retrieved from TimelineSummaryGetter + * into data to be displayed as a bar chart. + * + * @param recentDaysActivity The data retrieved from + * TimelineSummaryGetter. + * @param showIntermediateDates If true, shows all dates. If false, shows + * only first and last date. + * + * @return The data to be displayed in the BarChart. + */ + private static List parseChartData(List recentDaysActivity, boolean showIntermediateDates) { + // if no data, return null indicating no result. 
+ if (CollectionUtils.isEmpty(recentDaysActivity)) { + return null; + } + + // Create a bar chart item for each recent days activity item + List fileEvtCounts = new ArrayList<>(); + List artifactEvtCounts = new ArrayList<>(); + + for (int i = 0; i < recentDaysActivity.size(); i++) { + DailyActivityAmount curItem = recentDaysActivity.get(i); + + long fileAmt = curItem.getFileActivityCount(); + long artifactAmt = curItem.getArtifactActivityCount() * 100; + String formattedDate = (showIntermediateDates || i == 0 || i == recentDaysActivity.size() - 1) + ? TimelineSummary.formatDate(curItem.getDay(), CHART_FORMAT) : ""; + + OrderedKey thisKey = new OrderedKey(formattedDate, i); + fileEvtCounts.add(new BarChartItem(thisKey, fileAmt)); + artifactEvtCounts.add(new BarChartItem(thisKey, artifactAmt)); + } + + return Arrays.asList( + new BarChartSeries(Bundle.TimlinePanel_last30DaysChart_fileEvts_title(), FILE_EVT_COLOR, fileEvtCounts), + new BarChartSeries(Bundle.TimlinePanel_last30DaysChart_artifactEvts_title(), ARTIFACT_EVT_COLOR, artifactEvtCounts)); + } + + /** + * Create a default cell model to be use with excel export in the earliest / + * latest date format. + * + * @param date The date. + * @return The cell model. + */ + private static DefaultCellModel getEarliestLatestCell(Date date) { + return new DefaultCellModel<>(date, (dt) -> dt == null ? 
"" : EARLIEST_LATEST_FORMAT.format(dt), EARLIEST_LATEST_FORMAT_STR); + } + + @Messages({ + "TimelinePanel_getExports_sheetName=Timeline", + "TimelinePanel_getExports_activityRange=Activity Range", + "TimelinePanel_getExports_earliest=Earliest:", + "TimelinePanel_getExports_latest=Latest:", + "TimelinePanel_getExports_dateColumnHeader=Date", + "TimelinePanel_getExports_chartName=Last 30 Days",}) + List getExports(DataSource dataSource) { + DataFetcher dataFetcher = (ds) -> timelineSummary.getTimelineSummaryData(ds, MOST_RECENT_DAYS_COUNT); + TimelineSummaryData summaryData = ExcelExportAction.getFetchResult(dataFetcher, "Timeline", dataSource); + if (summaryData == null) { + return Collections.emptyList(); + } + + return Arrays.asList( + new ExcelSpecialFormatExport(Bundle.TimelinePanel_getExports_sheetName(), + Arrays.asList( + new TitledExportable(Bundle.TimelinePanel_getExports_activityRange(), Collections.emptyList()), + new KeyValueItemExportable(Bundle.TimelinePanel_getExports_earliest(), getEarliestLatestCell(summaryData.getMinDate())), + new KeyValueItemExportable(Bundle.TimelinePanel_getExports_latest(), getEarliestLatestCell(summaryData.getMaxDate())), + new BarChartExport(Bundle.TimelinePanel_getExports_dateColumnHeader(), + "#,###", + Bundle.TimelinePanel_getExports_chartName(), + parseChartData(summaryData.getMostRecentDaysActivity(), true))))); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportTypes.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportTypes.java new file mode 100755 index 0000000000..f3787a4a98 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportTypes.java @@ -0,0 +1,265 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2019-2021 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.awt.Color; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.PieChartItem; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.ContainerSummary; +import org.sleuthkit.autopsy.coreutils.FileTypeUtils.FileTypeCategory; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataSourceInfoUtilities; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.MimeTypeSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.SleuthkitCaseProvider; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TypesSummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.TypesSummary.FileTypeCategoryData; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.KeyValueItemExportable; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Class to export summary information on the known files present in the + * specified DataSource. 
+ */ +@Messages({ + "ExportTypes_artifactsTypesPieChart_title=Artifact Types", + "ExportTypes_filesByCategoryTable_allocatedRow_title=Allocated Files", + "ExportTypes_filesByCategoryTable_unallocatedRow_title=Unallocated Files", + "ExportTypes_filesByCategoryTable_slackRow_title=Slack Files", + "ExportTypes_filesByCategoryTable_directoryRow_title=Directories", + "ExportTypes_fileMimeTypesChart_title=File Types", + "ExportTypes_fileMimeTypesChart_valueLabel=Count", + "ExportTypes_fileMimeTypesChart_audio_title=Audio", + "ExportTypes_fileMimeTypesChart_documents_title=Documents", + "ExportTypes_fileMimeTypesChart_executables_title=Executables", + "ExportTypes_fileMimeTypesChart_images_title=Images", + "ExportTypes_fileMimeTypesChart_videos_title=Videos", + "ExportTypes_fileMimeTypesChart_other_title=Other", + "ExportTypes_fileMimeTypesChart_unknown_title=Unknown", + "ExportTypes_fileMimeTypesChart_notAnalyzed_title=Not Analyzed", + "ExportTypes_usageLabel_title=Usage", + "ExportTypes_osLabel_title=OS", + "ExportTypes_sizeLabel_title=Size", + "ExportTypes_excelTabName=Types"}) +class ExportTypes { + + /** + * Data for types pie chart. + */ + private static class TypesPieChartData { + + private final List pieSlices; + private final boolean usefulContent; + + /** + * Main constructor. + * + * @param pieSlices The pie slices. + * @param usefulContent True if this is useful content; false if there + * is 0 mime type information. + */ + TypesPieChartData(List pieSlices, boolean usefulContent) { + this.pieSlices = pieSlices; + this.usefulContent = usefulContent; + } + + /** + * @return The pie chart data. + */ + List getPieSlices() { + return pieSlices; + } + + /** + * @return Whether or not the data is usefulContent. 
+ */ + boolean isUsefulContent() { + return usefulContent; + } + } + + private final MimeTypeSummary mimeTypeSummary; + private final ContainerSummary containerSummary; + private final TypesSummary typesSummary; + private final SleuthkitCaseProvider provider; + + private static final Color IMAGES_COLOR = new Color(156, 39, 176); + private static final Color VIDEOS_COLOR = Color.YELLOW; + private static final Color AUDIO_COLOR = Color.BLUE; + private static final Color DOCUMENTS_COLOR = Color.GREEN; + private static final Color EXECUTABLES_COLOR = new Color(0, 188, 212); + private static final Color UNKNOWN_COLOR = Color.ORANGE; + private static final Color OTHER_COLOR = new Color(78, 52, 46); + private static final Color NOT_ANALYZED_COLOR = Color.WHITE; + + // All file type categories. + private static final List FILE_MIME_TYPE_CATEGORIES = Arrays.asList( + new FileTypeCategoryData(Bundle.ExportTypes_fileMimeTypesChart_images_title(), FileTypeCategory.IMAGE.getMediaTypes(), IMAGES_COLOR), + new FileTypeCategoryData(Bundle.ExportTypes_fileMimeTypesChart_videos_title(), FileTypeCategory.VIDEO.getMediaTypes(), VIDEOS_COLOR), + new FileTypeCategoryData(Bundle.ExportTypes_fileMimeTypesChart_audio_title(), FileTypeCategory.AUDIO.getMediaTypes(), AUDIO_COLOR), + new FileTypeCategoryData(Bundle.ExportTypes_fileMimeTypesChart_documents_title(), FileTypeCategory.DOCUMENTS.getMediaTypes(), DOCUMENTS_COLOR), + new FileTypeCategoryData(Bundle.ExportTypes_fileMimeTypesChart_executables_title(), FileTypeCategory.EXECUTABLE.getMediaTypes(), EXECUTABLES_COLOR), + new FileTypeCategoryData(Bundle.ExportTypes_fileMimeTypesChart_unknown_title(), new HashSet<>(Arrays.asList("application/octet-stream")), UNKNOWN_COLOR) + ); + + ExportTypes() { + this.provider = SleuthkitCaseProvider.DEFAULT; + containerSummary = new ContainerSummary(); + typesSummary = new TypesSummary(); + mimeTypeSummary = new MimeTypeSummary(); + } + + /** + * Gets all the data for the file type pie chart. 
+ * + * @param mimeTypeData The means of acquiring data. + * @param dataSource The datasource. + * + * @return The pie chart items. + */ + private TypesPieChartData getMimeTypeCategoriesModel(DataSource dataSource) + throws SQLException, TskCoreException, SleuthkitCaseProvider.SleuthkitCaseProviderException { + + if (dataSource == null) { + return null; + } + + // for each category of file types, get the counts of files + List fileCategoryItems = new ArrayList<>(); + long categoryTotalCount = 0; + + for (FileTypeCategoryData cat : FILE_MIME_TYPE_CATEGORIES) { + long thisValue = DataSourceInfoUtilities.getLongOrZero(mimeTypeSummary.getCountOfFilesForMimeTypes(dataSource, cat.getMimeTypes())); + categoryTotalCount += thisValue; + + fileCategoryItems.add(new PieChartItem( + cat.getLabel(), + thisValue, + cat.getColor())); + } + + // get a count of all files with no mime type + long noMimeTypeCount = DataSourceInfoUtilities.getLongOrZero(mimeTypeSummary.getCountOfFilesWithNoMimeType(dataSource)); + + // get a count of all regular files + long allRegularFiles = DataSourceInfoUtilities.getLongOrZero(DataSourceInfoUtilities.getCountOfRegNonSlackFiles(provider.get(), dataSource, null)); + + // create entry for mime types in other category + long otherCount = allRegularFiles - (categoryTotalCount + noMimeTypeCount); + PieChartItem otherPieItem = new PieChartItem(Bundle.ExportTypes_fileMimeTypesChart_other_title(), + otherCount, OTHER_COLOR); + + // check at this point to see if these are all 0; if so, we don't have useful content. 
+ boolean usefulContent = categoryTotalCount > 0 || otherCount > 0; + + // create entry for not analyzed mime types category + PieChartItem notAnalyzedItem = new PieChartItem(Bundle.ExportTypes_fileMimeTypesChart_notAnalyzed_title(), + noMimeTypeCount, NOT_ANALYZED_COLOR); + + // combine categories with 'other' and 'not analyzed' + List items = Stream.concat( + fileCategoryItems.stream(), + Stream.of(otherPieItem, notAnalyzedItem)) + // remove items that have no value + .filter(slice -> slice.getValue() > 0) + .collect(Collectors.toList()); + + return new TypesPieChartData(items, usefulContent); + } + + /** + * Returns a key value pair to be exported in a sheet. + * + * @param fetcher The means of fetching the data. + * @param key The key to use. + * @param dataSource The data source containing the data. + * + * @return The key value pair to be exported. + */ + private static KeyValueItemExportable getStrExportable(DataFetcher fetcher, String key, DataSource dataSource) { + String result = ExcelExportAction.getFetchResult(fetcher, "Types", dataSource); + return (result == null) ? null : new KeyValueItemExportable(key, new DefaultCellModel<>(result)); + } + + /** + * Returns a key value pair to be exported in a sheet formatting the long + * with commas separated by orders of 1000. + * + * @param fetcher The means of fetching the data. + * @param key The string key for this key value pair. + * @param dataSource The data source. + * + * @return The key value pair. + */ + private static KeyValueItemExportable getCountExportable(DataFetcher fetcher, String key, DataSource dataSource) { + Long count = ExcelExportAction.getFetchResult(fetcher, "Types", dataSource); + return (count == null) ? 
null : new KeyValueItemExportable(key, + new DefaultCellModel(count, DataSourceInfoUtilities.COMMA_FORMATTER::format, DataSourceInfoUtilities.COMMA_FORMAT_STR)); + } + + List getExports(DataSource dataSource) { + if (dataSource == null) { + return Collections.emptyList(); + } + + DataFetcher usageFetcher = (ds) -> containerSummary.getDataSourceType(ds); + DataFetcher osFetcher = (ds) -> containerSummary.getOperatingSystems(ds); + DataFetcher sizeFetcher = (ds) -> ds == null ? null : ds.getSize(); + + DataFetcher typesFetcher = (ds) -> getMimeTypeCategoriesModel(ds); + + DataFetcher allocatedFetcher = (ds) -> typesSummary.getCountOfAllocatedFiles(ds); + DataFetcher unallocatedFetcher = (ds) -> typesSummary.getCountOfUnallocatedFiles(ds); + DataFetcher slackFetcher = (ds) -> typesSummary.getCountOfSlackFiles(ds); + DataFetcher directoriesFetcher = (ds) -> typesSummary.getCountOfDirectories(ds); + + // Retrieve data to create the types pie chart + TypesPieChartData typesData = ExcelExportAction.getFetchResult(typesFetcher, "Types", dataSource); + PieChartExport typesChart = (typesData == null || !typesData.isUsefulContent()) ? 
null + : new PieChartExport( + Bundle.ExportTypes_fileMimeTypesChart_title(), + Bundle.ExportTypes_fileMimeTypesChart_valueLabel(), + "#,###", + Bundle.ExportTypes_fileMimeTypesChart_title(), + typesData.getPieSlices()); + + return Arrays.asList(new ExcelSpecialFormatExport(Bundle.ExportTypes_excelTabName(), + Stream.of( + getStrExportable(usageFetcher, Bundle.ExportTypes_usageLabel_title(), dataSource), + getStrExportable(osFetcher, Bundle.ExportTypes_osLabel_title(), dataSource), + new KeyValueItemExportable(Bundle.ExportTypes_sizeLabel_title(), + SizeRepresentationUtil.getBytesCell(ExcelExportAction.getFetchResult(sizeFetcher, "Types", dataSource))), + typesChart, + getCountExportable(allocatedFetcher, Bundle.ExportTypes_filesByCategoryTable_allocatedRow_title(), dataSource), + getCountExportable(unallocatedFetcher, Bundle.ExportTypes_filesByCategoryTable_unallocatedRow_title(), dataSource), + getCountExportable(slackFetcher, Bundle.ExportTypes_filesByCategoryTable_slackRow_title(), dataSource), + getCountExportable(directoriesFetcher, Bundle.ExportTypes_filesByCategoryTable_directoryRow_title(), dataSource)) + .filter(sheet -> sheet != null) + .collect(Collectors.toList()) + )); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportUserActivity.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportUserActivity.java new file mode 100755 index 0000000000..f1a4198b42 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/ExportUserActivity.java @@ -0,0 +1,240 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Locale; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang.StringUtils; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.DataFetcher; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.LastAccessedArtifact; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopAccountResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDeviceAttachedResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopDomainsResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopProgramsResult; +import org.sleuthkit.autopsy.datasourcesummary.datamodel.UserActivitySummary.TopWebSearchResult; +import static org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExportAction.getTableExport; + +/** + * Class to export user activity present in the specified DataSource. 
+ */ +@Messages({ + "ExportUserActivity_tab_title=User Activity", + "ExportUserActivity_TopProgramsTableModel_tabName=Recent Programs", + "ExportUserActivity_TopDomainsTableModel_tabName=Recent Domains", + "ExportUserActivity_TopWebSearchTableModel_tabName=Recent Web Searches", + "ExportUserActivity_TopDeviceAttachedTableModel_tabName=Recent Devices Attached", + "ExportUserActivity_TopAccountTableModel_tabName=Recent Account Types Used", + "ExportUserActivity_TopProgramsTableModel_name_header=Program", + "ExportUserActivity_TopProgramsTableModel_folder_header=Folder", + "ExportUserActivity_TopProgramsTableModel_count_header=Run Times", + "ExportUserActivity_TopProgramsTableModel_lastrun_header=Last Run", + "ExportUserActivity_TopDomainsTableModel_domain_header=Domain", + "ExportUserActivity_TopDomainsTableModel_count_header=Visits", + "ExportUserActivity_TopDomainsTableModel_lastAccess_header=Last Accessed", + "ExportUserActivity_TopWebSearchTableModel_searchString_header=Search String", + "ExportUserActivity_TopWebSearchTableModel_dateAccessed_header=Date Accessed", + "ExportUserActivity_TopWebSearchTableModel_translatedResult_header=Translated", + "ExportUserActivity_TopDeviceAttachedTableModel_deviceId_header=Device Id", + "ExportUserActivity_TopDeviceAttachedTableModel_makeModel_header=Make and Model", + "ExportUserActivity_TopDeviceAttachedTableModel_dateAccessed_header=Last Accessed", + "ExportUserActivity_TopAccountTableModel_accountType_header=Account Type", + "ExportUserActivity_TopAccountTableModel_lastAccess_header=Last Accessed", + "ExportUserActivity_noDataExists=No communication data exists"}) +class ExportUserActivity { + + private final UserActivitySummary userSummary; + + private static final String DATETIME_FORMAT_STR = "yyyy/MM/dd HH:mm:ss"; + private static final DateFormat DATETIME_FORMAT = new SimpleDateFormat(DATETIME_FORMAT_STR, Locale.getDefault()); + private static final int TOP_PROGS_COUNT = 10; + private static final int 
TOP_DOMAINS_COUNT = 10; + private static final int TOP_SEARCHES_COUNT = 10; + private static final int TOP_ACCOUNTS_COUNT = 5; + private static final int TOP_DEVICES_COUNT = 10; + + // set up recent programs + private static final List>> topProgramsTemplate = Arrays.asList( + // program name column + new ColumnModel<>( + Bundle.ExportUserActivity_TopProgramsTableModel_name_header(), + (prog) -> { + return new DefaultCellModel<>(prog.getProgramName()); + }, + 250), + // program folder column + new ColumnModel<>( + Bundle.ExportUserActivity_TopProgramsTableModel_folder_header(), + (prog) -> { + return new DefaultCellModel<>( + UserActivitySummary.getShortFolderName( + prog.getProgramPath(), + prog.getProgramName())); + }, + 150), + // run count column + new ColumnModel<>( + Bundle.ExportUserActivity_TopProgramsTableModel_count_header(), + (prog) -> { + return new DefaultCellModel<>(prog.getRunTimes(), (num) -> num == null ? "" : num.toString()); + }, + 80), + // last run date column + new ColumnModel<>( + Bundle.ExportUserActivity_TopProgramsTableModel_lastrun_header(), + getDateFunct(), + 150) + ); + + // set up recent domains + private static final List>> topDomainsTemplate = Arrays.asList( + // domain column + new ColumnModel<>( + Bundle.ExportUserActivity_TopDomainsTableModel_domain_header(), + (recentDomain) -> { + return new DefaultCellModel<>(recentDomain.getDomain()); + }, + 250), + // count column + new ColumnModel<>( + Bundle.ExportUserActivity_TopDomainsTableModel_count_header(), + (recentDomain) -> { + return new DefaultCellModel<>(recentDomain.getVisitTimes(), (num) -> num == null ? 
"" : num.toString()); + }, + 100), + // last accessed column + new ColumnModel<>( + Bundle.ExportUserActivity_TopDomainsTableModel_lastAccess_header(), + getDateFunct(), + 150) + ); + + // top web searches + private static final List>> topWebSearchesTemplate = Arrays.asList( + // search string column + new ColumnModel<>( + Bundle.ExportUserActivity_TopWebSearchTableModel_searchString_header(), + (webSearch) -> { + return new DefaultCellModel<>(webSearch.getSearchString()); + }, + 250 + ), + // last accessed + new ColumnModel<>( + Bundle.ExportUserActivity_TopWebSearchTableModel_dateAccessed_header(), + getDateFunct(), + 150 + ), + // translated value + new ColumnModel<>( + Bundle.ExportUserActivity_TopWebSearchTableModel_translatedResult_header(), + (webSearch) -> { + return new DefaultCellModel<>(webSearch.getTranslatedResult()); + }, + 250 + ) + ); + + // top devices attached + private static final List>> topDevicesTemplate = Arrays.asList( + // device id column + new ColumnModel<>( + Bundle.ExportUserActivity_TopDeviceAttachedTableModel_deviceId_header(), + (device) -> { + return new DefaultCellModel<>(device.getDeviceId()); + }, + 250 + ), + // last accessed + new ColumnModel<>( + Bundle.ExportUserActivity_TopDeviceAttachedTableModel_dateAccessed_header(), + getDateFunct(), + 150 + ), + // make and model + new ColumnModel<>( + Bundle.ExportUserActivity_TopDeviceAttachedTableModel_makeModel_header(), + (device) -> { + String make = StringUtils.isBlank(device.getDeviceMake()) ? "" : device.getDeviceMake().trim(); + String model = StringUtils.isBlank(device.getDeviceModel()) ? "" : device.getDeviceModel().trim(); + String makeModelString = (make.isEmpty() || model.isEmpty()) + ? 
make + model + : String.format("%s - %s", make, model); + return new DefaultCellModel<>(makeModelString); + }, + 250 + ) + ); + + // top accounts + private static final List>> topAccountsTemplate = Arrays.asList( + // account type column + new ColumnModel<>( + Bundle.ExportUserActivity_TopAccountTableModel_accountType_header(), + (account) -> { + return new DefaultCellModel<>(account.getAccountType()); + }, + 250 + ), + // last accessed + new ColumnModel<>( + Bundle.ExportUserActivity_TopAccountTableModel_lastAccess_header(), + getDateFunct(), + 150 + ) + ); + + ExportUserActivity() { + userSummary = new UserActivitySummary(); + } + + private static Function> getDateFunct() { + return (T lastAccessed) -> { + Function dateParser = (dt) -> dt == null ? "" : DATETIME_FORMAT.format(dt); + return new DefaultCellModel<>(lastAccessed.getLastAccessed(), dateParser, DATETIME_FORMAT_STR); + }; + } + + List getExports(DataSource dataSource) { + + DataFetcher> topProgramsFetcher = (ds) -> userSummary.getTopPrograms(ds, TOP_PROGS_COUNT); + DataFetcher> topDomainsFetcher = (ds) -> userSummary.getRecentDomains(ds, TOP_DOMAINS_COUNT); + DataFetcher> topWebSearchesFetcher = (ds) -> userSummary.getMostRecentWebSearches(ds, TOP_SEARCHES_COUNT); + DataFetcher> topDevicesAttachedFetcher = (ds) -> userSummary.getRecentDevices(ds, TOP_DEVICES_COUNT); + DataFetcher> topAccountsFetcher = (ds) -> userSummary.getRecentAccounts(ds, TOP_ACCOUNTS_COUNT); + + return Stream.of( + getTableExport(topProgramsFetcher, topProgramsTemplate, Bundle.ExportUserActivity_TopProgramsTableModel_tabName(), dataSource), + getTableExport(topDomainsFetcher, topDomainsTemplate, Bundle.ExportUserActivity_TopDomainsTableModel_tabName(), dataSource), + getTableExport(topWebSearchesFetcher, topWebSearchesTemplate, Bundle.ExportUserActivity_TopWebSearchTableModel_tabName(), dataSource), + getTableExport(topDevicesAttachedFetcher, topDevicesTemplate, Bundle.ExportUserActivity_TopDeviceAttachedTableModel_tabName(), 
dataSource), + getTableExport(topAccountsFetcher, topAccountsTemplate, Bundle.ExportUserActivity_TopAccountTableModel_tabName(), dataSource)) + .filter(sheet -> sheet != null) + .collect(Collectors.toList()); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/PieChartExport.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/PieChartExport.java similarity index 91% rename from Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/PieChartExport.java rename to Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/PieChartExport.java index 4005a34ebc..395637acb1 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/uiutils/PieChartExport.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/PieChartExport.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.datasourcesummary.uiutils; +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; @@ -35,23 +35,24 @@ import org.apache.poi.xssf.usermodel.XSSFClientAnchor; import org.apache.poi.xssf.usermodel.XSSFDrawing; import org.apache.poi.xssf.usermodel.XSSFSheet; import org.openxmlformats.schemas.drawingml.x2006.chart.CTPieChart; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelExportException; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelExport.ExcelSheetExport; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.ExcelItemExportable; -import org.sleuthkit.autopsy.datasourcesummary.uiutils.ExcelSpecialFormatExport.ItemDimensions; +import org.sleuthkit.autopsy.datasourcesummary.uiutils.PieChartItem; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelExportException; +import 
org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelExport.ExcelSheetExport; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.ExcelItemExportable; +import org.sleuthkit.autopsy.report.modules.datasourcesummaryexport.ExcelSpecialFormatExport.ItemDimensions; /** * * Class that creates an excel pie chart along with data table. */ -public class PieChartExport implements ExcelItemExportable, ExcelSheetExport { +class PieChartExport implements ExcelItemExportable, ExcelSheetExport { private static final int DEFAULT_ROW_SIZE = 20; private static final int DEFAULT_COL_SIZE = 10; private static final int DEFAULT_ROW_PADDING = 1; private static final int DEFAULT_COL_OFFSET = 1; - private final ExcelTableExport tableExport; + private final ExcelTableExport tableExport; private final int colOffset; private final int rowPadding; private final int colSize; @@ -69,7 +70,7 @@ public class PieChartExport implements ExcelItemExportable, ExcelSheetExport { * @param chartTitle The title for the chart. * @param slices The values for the pie slices. */ - public PieChartExport(String keyColumnHeader, + PieChartExport(String keyColumnHeader, String valueColumnHeader, String valueFormatString, String chartTitle, List slices) { @@ -93,7 +94,7 @@ public class PieChartExport implements ExcelItemExportable, ExcelSheetExport { * @param colSize The column size of the chart. * @param rowSize The row size of the chart. 
*/ - public PieChartExport(String keyColumnHeader, + PieChartExport(String keyColumnHeader, String valueColumnHeader, String valueFormatString, String chartTitle, String sheetName, List slices, diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/SizeRepresentationUtil.java b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/SizeRepresentationUtil.java new file mode 100755 index 0000000000..7c9018c9ff --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/report/modules/datasourcesummaryexport/SizeRepresentationUtil.java @@ -0,0 +1,178 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.report.modules.datasourcesummaryexport; + +import java.text.DecimalFormat; +import org.openide.util.NbBundle; + +/** + * This class provides utilities for representing storage size in most relevant + * units (i.e. bytes, megabytes, etc.). + */ +final class SizeRepresentationUtil { + + private static final int SIZE_CONVERSION_CONSTANT = 1000; + private static final DecimalFormat APPROXIMATE_SIZE_FORMAT = new DecimalFormat("#.##"); + + /** + * A size unit corresponding to orders of magnitude of bytes (kilobyte, gigabytes, etc.). 
+ */ + @NbBundle.Messages({ + "SizeRepresentationUtil_units_bytes=bytes", + "SizeRepresentationUtil_units_kilobytes=KB", + "SizeRepresentationUtil_units_megabytes=MB", + "SizeRepresentationUtil_units_gigabytes=GB", + "SizeRepresentationUtil_units_terabytes=TB", + "SizeRepresentationUtil_units_petabytes=PB" + }) + enum SizeUnit { + BYTES(Bundle.SizeRepresentationUtil_units_bytes(), "#", 0), + KB(Bundle.SizeRepresentationUtil_units_kilobytes(), "#,##0.00,", 1), + MB(Bundle.SizeRepresentationUtil_units_megabytes(), "#,##0.00,,", 2), + GB(Bundle.SizeRepresentationUtil_units_gigabytes(), "#,##0.00,,,", 3), + TB(Bundle.SizeRepresentationUtil_units_terabytes(), "#,##0.00,,,,", 4), + PB(Bundle.SizeRepresentationUtil_units_petabytes(), "#,##0.00,,,,,", 5); + + private final String suffix; + private final String excelFormatString; + private final long divisor; + + /** + * Main constructor. + * @param suffix The string suffix to use for size unit. + * @param excelFormatString The excel format string to use for this size unit. + * @param power The power of 1000 of bytes for this size unit. + */ + SizeUnit(String suffix, String excelFormatString, int power) { + this.suffix = suffix; + + // based on https://www.mrexcel.com/board/threads/how-do-i-format-cells-to-show-gb-mb-kb.140135/ + this.excelFormatString = String.format("%s \"%s\"", excelFormatString, suffix); + this.divisor = (long) Math.pow(SIZE_CONVERSION_CONSTANT, power); + } + + /** + * @return The string suffix to use for size unit. + */ + String getSuffix() { + return suffix; + } + + /** + * @return The excel format string to use for this size unit. + */ + String getExcelFormatString() { + return excelFormatString; + } + + /** + * @return The divisor to convert from bytes to this unit. + */ + long getDivisor() { + return divisor; + } + } + + /** + * Get a long size in bytes as a string formated to be read by users. + * + * @param size Long value representing a size in bytes. 
+ * + * @return Return a string formated with a user friendly version of the size + * as a string, returns empty String when provided empty size. + */ + static String getSizeString(Long size) { + return getSizeString(size, APPROXIMATE_SIZE_FORMAT, true); + } + + /** + * Determines the relevant size unit that should be used for a particular size. + * @param size The size in bytes. + * @return The relevant size unit. + */ + static SizeUnit getSizeUnit(Long size) { + if (size == null) { + return SizeUnit.values()[0]; + } + + for (SizeUnit unit : SizeUnit.values()) { + long result = size / unit.getDivisor(); + if (result < SIZE_CONVERSION_CONSTANT) { + return unit; + } + } + + return SizeUnit.values()[SizeUnit.values().length - 1]; + } + + /** + * Get a long size in bytes as a string formatted to be read by users. + * + * @param size Long value representing a size in byte.s + * @param format The means of formatting the number. + * @param showFullSize Optionally show the number of bytes in the + * datasource. + * + * @return Return a string formatted with a user friendly version of the size + * as a string, returns empty String when provided empty size. + */ + static String getSizeString(Long size, DecimalFormat format, boolean showFullSize) { + if (size == null) { + return ""; + } + + SizeUnit sizeUnit = getSizeUnit(size); + if (sizeUnit == null) { + sizeUnit = SizeUnit.BYTES; + } + + String closestUnitSize = String.format("%s %s", + format.format(((double) size) / sizeUnit.getDivisor()), sizeUnit.getSuffix()); + + String fullSize = String.format("%d %s", size, SizeUnit.BYTES.getSuffix()); + if (sizeUnit.equals(SizeUnit.BYTES)) { + return fullSize; + } else if (showFullSize) { + return String.format("%s (%s)", closestUnitSize, fullSize); + } else { + return closestUnitSize; + } + } + + /** + * Returns a default cell model using size units. + * @param bytes The number of bytes. + * @return The default cell model. 
+ */ + static DefaultCellModel getBytesCell(Long bytes) { + if (bytes == null) { + return new DefaultCellModel<>(""); + } else { + SizeUnit unit = SizeRepresentationUtil.getSizeUnit(bytes); + if (unit == null) { + unit = SizeUnit.BYTES; + } + + return new DefaultCellModel<>(bytes, SizeRepresentationUtil::getSizeString, unit.getExcelFormatString()); + } + } + + private SizeRepresentationUtil() { + } +} diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java index 66a77cf617..b9eb3aeefc 100755 --- a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java +++ b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -52,7 +52,7 @@ public class DomainSearchCacheLoaderTest { new DiscoveryAttributes.DataSourceAttribute(), Group.GroupSortingAlgorithm.BY_GROUP_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME, - caseDb, null); + caseDb, null, new TestSearchContextImpl(false)); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); @@ -88,7 +88,7 @@ public class DomainSearchCacheLoaderTest { new DiscoveryAttributes.NoGroupingAttribute(), Group.GroupSortingAlgorithm.BY_GROUP_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME, - caseDb, null); + caseDb, null, new TestSearchContextImpl(false)); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); @@ -121,7 +121,7 @@ public class DomainSearchCacheLoaderTest { new DiscoveryAttributes.NoGroupingAttribute(), 
Group.GroupSortingAlgorithm.BY_GROUP_NAME, ResultsSorter.SortingMethod.BY_DATA_SOURCE, - caseDb, null); + caseDb, null, new TestSearchContextImpl(false)); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); @@ -155,7 +155,7 @@ public class DomainSearchCacheLoaderTest { new DiscoveryAttributes.DataSourceAttribute(), Group.GroupSortingAlgorithm.BY_GROUP_SIZE, ResultsSorter.SortingMethod.BY_DOMAIN_NAME, - caseDb, null); + caseDb, null, new TestSearchContextImpl(false)); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); @@ -173,4 +173,5 @@ public class DomainSearchCacheLoaderTest { } } } + } diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java index 7dcffed663..2037bbf8b3 100755 --- a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java +++ b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,7 +24,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Test; - import static org.mockito.Mockito.*; import static org.junit.Assert.*; import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey; @@ -46,11 +45,11 @@ public class DomainSearchTest { ); } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - Map sizes = domainSearch.getGroupSizes(null, - new ArrayList<>(), null, null, null, null, null); + Map sizes = domainSearch.getGroupSizes(null, + new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false)); assertEquals(4, sizes.get(groupOne).longValue()); } @@ -81,11 +80,11 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - Map sizes = domainSearch.getGroupSizes(null, - new ArrayList<>(), null, null, null, null, null); + Map sizes = domainSearch.getGroupSizes(null, + new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false)); assertEquals(4, sizes.get(groupOne).longValue()); assertEquals(3, sizes.get(groupTwo).longValue()); assertEquals(1, sizes.get(groupThree).longValue()); @@ -95,11 +94,11 @@ public class DomainSearchTest { public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); - when(cache.get(null, new ArrayList<>(), 
null, null, null, null, null)).thenReturn(new HashMap<>()); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(new HashMap<>()); DomainSearch domainSearch = new DomainSearch(cache, null, null); - Map sizes = domainSearch.getGroupSizes(null, - new ArrayList<>(), null, null, null, null, null); + Map sizes = domainSearch.getGroupSizes(null, + new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false)); assertEquals(0, sizes.size()); } @@ -120,17 +119,17 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null); + List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false)); assertEquals(3, firstPage.size()); for (int i = 0; i < firstPage.size(); i++) { assertEquals(domains.get(i), firstPage.get(i)); } } - + @Test public void getDomains_SingleGroupOverSizedPage_ShouldContainAllDomains() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -148,17 +147,17 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null); + 
List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null, new TestSearchContextImpl(false)); assertEquals(4, firstPage.size()); for (int i = 0; i < firstPage.size(); i++) { assertEquals(domains.get(i), firstPage.get(i)); } } - + @Test public void getDomains_SingleGroupHalfPage_ShouldContainHalfDomains() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -176,18 +175,18 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null); + List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null, new TestSearchContextImpl(false)); assertEquals(2, firstPage.size()); for (int i = 0; i < firstPage.size(); i++) { assertEquals(domains.get(i), firstPage.get(i)); } } - - @Test + + @Test public void getDomains_SingleGroupLastPageLastDomain_ShouldContainLastDomain() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -204,15 +203,15 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null); + List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, 
null, null, groupOne, 3, 1, null, null, new TestSearchContextImpl(false)); assertEquals(1, firstPage.size()); assertEquals(domains.get(domains.size() - 1), firstPage.get(0)); } - + @Test public void getDomains_SingleGroupOversizedOffset_ShouldContainNoDomains() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -230,14 +229,14 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null); + List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null, new TestSearchContextImpl(false)); assertEquals(0, firstPage.size()); } - + @Test public void getDomains_SingleGroupZeroSizedPage_ShouldContainNoDomains() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -255,14 +254,14 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null); + List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null, new TestSearchContextImpl(false)); assertEquals(0, firstPage.size()); } - + @Test public void getDomains_MultipleGroupsFullPage_ShouldContainAllDomainsInGroup() throws 
DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -290,14 +289,14 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null); + List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false)); assertEquals(3, firstPage.size()); } - + @Test public void getDomains_MultipleGroupsHalfPage_ShouldContainHalfDomainsInGroup() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -325,17 +324,17 @@ public class DomainSearchTest { } }; - when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - List firstPage = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null); + List firstPage = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null, new TestSearchContextImpl(false)); assertEquals(2, firstPage.size()); for (int i = 0; i < firstPage.size(); i++) { assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i)); } } - + @Test public void getDomains_SingleGroupSimulatedPaging_ShouldPageThroughAllDomains() throws DiscoveryException { DomainSearchCache cache = mock(DomainSearchCache.class); @@ -357,20 +356,20 @@ public class DomainSearchTest { } }; - 
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); + when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData); DomainSearch domainSearch = new DomainSearch(cache, null, null); - + int start = 0; int size = 2; while (start + size <= domains.size()) { - List page = domainSearch.getDomainsInGroup(null, - new ArrayList<>(), null, null, null, groupOne, start, size, null, null); + List page = domainSearch.getDomainsInGroup(null, + new ArrayList<>(), null, null, null, groupOne, start, size, null, null, new TestSearchContextImpl(false)); assertEquals(2, page.size()); - for(int i = 0; i < page.size(); i++) { + for (int i = 0; i < page.size(); i++) { assertEquals(domains.get(start + i), page.get(i)); } - + start += size; } } @@ -379,7 +378,7 @@ public class DomainSearchTest { private final String name; - public DummyKey(String name) { + DummyKey(String name) { this.name = name; } diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/TestSearchContextImpl.java b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/TestSearchContextImpl.java new file mode 100644 index 0000000000..128038eeed --- /dev/null +++ b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/TestSearchContextImpl.java @@ -0,0 +1,37 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.discovery.search; + +/** + * Implementation of SearchContext for testing to ensure NPEs are not thrown and + * the context indicates the expected cancellation status. + */ +public class TestSearchContextImpl implements SearchContext { + + private final boolean isCancelled; + + public TestSearchContextImpl(boolean hasBeenCancelled) { + isCancelled = hasBeenCancelled; + } + + @Override + public boolean searchIsCancelled() { + return isCancelled; + } +} diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java index 075ba9395b..ae532a1db9 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java @@ -262,6 +262,7 @@ final class AutoIngestDashboard extends JPanel implements Observer { * Shut down parts of the AutoIngestDashboard which were initialized */ void shutDown() { + scheduledRefreshThreadPoolExecutor.shutdownNow(); if (autoIngestMonitor != null) { autoIngestMonitor.shutDown(); } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java index 415b3237b9..24a1e57fb9 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java @@ -70,7 +70,7 @@ final class AutoIngestJobsNode extends AbstractNode { * refresh events */ AutoIngestJobsNode(AutoIngestMonitor monitor, AutoIngestJobStatus status, EventBus eventBus) { - super(Children.create(new AutoIngestNodeChildren(monitor, status, eventBus), true)); + 
super(Children.create(new AutoIngestNodeChildren(monitor, status, eventBus), false)); refreshChildrenEventBus = eventBus; } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java index 5a42a01e80..c2b7ae5259 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Manifest.java @@ -30,32 +30,35 @@ import javax.annotation.concurrent.Immutable; public final class Manifest implements Serializable { private static final long serialVersionUID = 1L; - private final String filePath; + private final Path filePath; private final Date dateFileCreated; private final String caseName; private final String deviceId; - private final String dataSourcePath; + private final Path dataSourcePath; + private final String dataSourceFileName; private final Map manifestProperties; public Manifest(Path manifestFilePath, Date dateFileCreated, String caseName, String deviceId, Path dataSourcePath, Map manifestProperties) { - this.filePath = manifestFilePath.toString(); - this.dateFileCreated = dateFileCreated; + this.filePath = Paths.get(manifestFilePath.toString()); + this.dateFileCreated = new Date(dateFileCreated.getTime()); this.caseName = caseName; this.deviceId = deviceId; if (null != dataSourcePath) { - this.dataSourcePath = dataSourcePath.toString(); + this.dataSourcePath = Paths.get(dataSourcePath.toString()); + dataSourceFileName = dataSourcePath.getFileName().toString(); } else { - this.dataSourcePath = ""; + this.dataSourcePath = Paths.get(""); + dataSourceFileName = ""; } this.manifestProperties = new HashMap<>(manifestProperties); } public Path getFilePath() { - return Paths.get(this.filePath); + return this.filePath; } public Date getDateFileCreated() { - return new Date(this.dateFileCreated.getTime()); + return dateFileCreated; } public String getCaseName() { @@ 
-67,11 +70,11 @@ public final class Manifest implements Serializable { } public Path getDataSourcePath() { - return Paths.get(dataSourcePath); + return dataSourcePath; } public String getDataSourceFileName() { - return Paths.get(dataSourcePath).getFileName().toString(); + return dataSourceFileName; } public Map getManifestProperties() { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index f3e073e7f9..42be2d0233 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -177,14 +177,6 @@ final class ChromeCacheExtractor { currentCase = Case.getCurrentCaseThrows(); fileManager = currentCase.getServices().getFileManager(); - // Create an output folder to save any derived files - absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName, context.getJobId()); - relOutputFolderName = Paths.get(RAImageIngestModule.getRelModuleOutputPath(currentCase, moduleName, context.getJobId())).normalize().toString(); - - File dir = new File(absOutputFolderName); - if (dir.exists() == false) { - dir.mkdirs(); - } } catch (NoCurrentCaseException ex) { String msg = "Failed to get current case."; //NON-NLS throw new IngestModuleException(msg, ex); @@ -279,6 +271,17 @@ final class ChromeCacheExtractor { // Identify each cache folder by searching for the index files in each List indexFiles = findIndexFiles(); + if (indexFiles.size() > 0) { + // Create an output folder to save any derived files + absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName, context.getJobId()); + relOutputFolderName = Paths.get(RAImageIngestModule.getRelModuleOutputPath(currentCase, moduleName, context.getJobId())).normalize().toString(); + + File dir = new File(absOutputFolderName); + if (dir.exists() == 
false) { + dir.mkdirs(); + } + } + // Process each of the cache folders for (AbstractFile indexFile: indexFiles) { diff --git a/docs/doxygen-user_fr/Doxyfile b/docs/doxygen-user_fr/Doxyfile index 80ef7fab53..177c9cdb6e 100644 --- a/docs/doxygen-user_fr/Doxyfile +++ b/docs/doxygen-user_fr/Doxyfile @@ -38,7 +38,7 @@ PROJECT_NAME = "Documentation utilisateur Autopsy" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 4.19.0 +PROJECT_NUMBER = 4.19.1 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a @@ -1025,7 +1025,7 @@ GENERATE_HTML = YES # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_OUTPUT = 4.19.0 +HTML_OUTPUT = 4.19.1 # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). diff --git a/docs/doxygen-user_fr/lastupdated b/docs/doxygen-user_fr/lastupdated new file mode 100644 index 0000000000..6873d4c1a4 --- /dev/null +++ b/docs/doxygen-user_fr/lastupdated @@ -0,0 +1,2 @@ +# Sun Aug 29 15:02:07 2021 +0200 +user-docs_fr.lastupdated=4f62b90f652ba12dae1b2286fb3eb065f00e5311 \ No newline at end of file diff --git a/docs/doxygen-user_fr/main.dox b/docs/doxygen-user_fr/main.dox index aabf341c7c..936de4aee8 100644 --- a/docs/doxygen-user_fr/main.dox +++ b/docs/doxygen-user_fr/main.dox @@ -8,6 +8,9 @@ Ceci est le guide de l'utilisateur de la "Options" et décritent dans cette documentation sont accessibles via la barre de menu système sous "Préférences" ou via le raccourci Cmd +, (touche "Cmd" + touche "plus"). 
+Version originale de ce guide: +- Dernière version à jour + Rubriques d'aide ------- Les rubriques suivantes sont disponibles: diff --git a/docs/doxygen/modDSIngestTutorial.dox b/docs/doxygen/modDSIngestTutorial.dox index c187f7ab33..ed8f128c13 100644 --- a/docs/doxygen/modDSIngestTutorial.dox +++ b/docs/doxygen/modDSIngestTutorial.dox @@ -76,29 +76,28 @@ With our connection in hand, we can do some queries. In our sample database, we stmt = dbConn.createStatement() resultSet = stmt.executeQuery("SELECT * FROM contacts")\endverbatim -For each row, we are going to get the values for the name, e-mail, and phone number and make a TSK_CONTACT artifact. Recall from the first tutorial that posting artifacts to the blackboard allows modules to communicate with each other and also allows you to easily display data to the user. The TSK_CONTACT artifact is for storing contact information. +For each row, we are going to get the values for the name, e-mail, and phone number and make a TSK_CONTACT artifact. Recall from the first tutorial that posting artifacts to the blackboard allows modules to communicate with each other and also allows you to easily display data to the user. The TSK_CONTACT artifact is for storing contact information. The artifact catalog shows that TSK_CONTACT is a data artifact, so we will be using the newDataArtifact() method to create each one. The basic approach in our example is to make an artifact of a given type (TSK_CONTACT) and have it be associated with the database it came from. We then make attributes for the name, email, and phone. 
The following code does this for each row in the database: \verbatim while resultSet.next(): - - # Make an artifact on the blackboard and give it attributes - art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT) - - name = resultSet.getString("name") - art.addAttribute(BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID(), - ContactsDbIngestModuleFactory.moduleName, name)) - - email = resultSet.getString("email") - art.addAttribute(BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(), - ContactsDbIngestModuleFactory.moduleName, email)) - - phone = resultSet.getString("phone") - art.addAttribute(BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(), - ContactsDbIngestModuleFactory.moduleName, phone))\endverbatim + try: + name = resultSet.getString("name") + email = resultSet.getString("email") + phone = resultSet.getString("phone") + except SQLException as e: + self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") + + + # Make an artifact on the blackboard, TSK_CONTACT and give it attributes for each of the fields + art = file.newDataArtifact(BlackboardArtifact.Type.TSK_CONTACT, Arrays.asList( + BlackboardAttribute(BlackboardAttribute.Type.TSK_NAME_PERSON, + ContactsDbIngestModuleFactory.moduleName, name), + BlackboardAttribute(BlackboardAttribute.Type.TSK_EMAIL, + ContactsDbIngestModuleFactory.moduleName, email), + BlackboardAttribute(BlackboardAttribute.Type.TSK_PHONE_NUMBER, + ContactsDbIngestModuleFactory.moduleName, phone) + ))\endverbatim That's it. We've just found the databases, queried them, and made artifacts for the user to see. There are some final things though. First, we should fire off an event so that the UI updates and refreshes with the new artifacts. We can fire just one event after each database is parsed (or you could fire one for each artifact - it's up to you). 
@@ -113,6 +112,8 @@ stmt.close() dbConn.close() os.remove(lclDbPath)\endverbatim +The final version of findContactsDb.py can be found on github. + \subsection python_tutorial2_niceties Niceties Data source-level ingest modules can run for quite some time. Therefore, data source-level ingest modules should do some additional things that file-level ingest modules do not need to. diff --git a/docs/doxygen/modFileIngestTutorial.dox b/docs/doxygen/modFileIngestTutorial.dox index 8079718137..dadc31305b 100644 --- a/docs/doxygen/modFileIngestTutorial.dox +++ b/docs/doxygen/modFileIngestTutorial.dox @@ -75,75 +75,65 @@ The process() method is passed in a reference to an AbstractFile Object. With th Now that we have found the files, we want to do something with them. In our situation, we just want to alert the user to them. We do this by making an "Interesting Item" blackboard artifact. The Blackboard is where ingest modules can communicate with each other and with the Autopsy GUI. The blackboard has a set of artifacts on it and each artifact:

  • Has a type
  • +
  • Has a category
  • Is associated with a file
  • Has one or more attributes. Attributes are simply name and value pairs.
-For our example, we are going to make an artifact of type "TSK_INTERESTING_FILE" whenever we find a big and round file. These are one of the most generic artifact types and are simply a way of alerting the user that a file is interesting for some reason. Once you make the artifact, it will be shown in the UI. The below code makes an artifact for the file and puts it into the set of "Big and Round Files". You can create whatever set names you want. The Autopsy GUI organizes Interesting Files by their set name. +A list of standard artifact types can be found in the artifact catalog. It is important to note the catagory for the artifact you want to since this affects which method you will use to create the artifact. + +For our example, we are going to make an artifact of type "TSK_INTERESTING_FILE", which is an analysis result, whenever we find a big and round file. These are one of the most generic artifact types and are simply a way of alerting the user that a file is interesting for some reason. Once you make the artifact, it will be shown in the UI. The below code makes an artifact for the file and puts it into the set of "Big and Round Files". You can create whatever set names you want. The Autopsy GUI organizes Interesting Files by their set name. 
\verbatim - art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT) - att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), - FindBigRoundFilesIngestModuleFactory.moduleName, "Big and Round Files") - art.addAttribute(att)\endverbatim + art = file.newAnalysisResult(BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE, + None, "Big and Round Files", None, + Arrays.asList( + BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME, + FindBigRoundFilesIngestModuleFactory.moduleName, + "Big and Round Files"))).getAnalysisResult()\endverbatim -The above code adds the artifact and a single attribute to the blackboard in the embedded database, but it does not notify other modules or the UI. The UI will eventually refresh, but it is faster to fire an event with this: +The above code adds the artifact and a single attribute to the blackboard in the embedded database, but it does not notify other modules or the UI. Calling postArtifact() will let the tree viewer and other parts of the UI know that a refresh may be necessary, and passes the newly created artifacts to other modules that may do further processing on it. \verbatim - IngestServices.getInstance().fireModuleDataEvent( - ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName, - BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))\endverbatim + blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName)\endverbatim That's it. 
Your process() method should look something like this: \verbatim def process(self, file): + # Use blackboard class to index blackboard artifacts for keyword search + blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard() + # Skip non-files - - if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or - - (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or - + if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or + (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or (file.isFile() == False)): - return IngestModule.ProcessResult.OK - - - # Look for files bigger than 10MB that are a multiple of 4096 - - if ((file.getSize() > 10485760) and ((file.getSize() % 4096) == 0)): - - + # Look for files bigger than 10MB that are a multiple of 4096 + if ((file.getSize() > 10485760) and ((file.getSize() % 4096) == 0)): # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of - # artifact. Refer to the developer docs for other examples. 
+ art = file.newAnalysisResult(BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE, + None, "Big and Round Files", None, + Arrays.asList( + BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME, + FindBigRoundFilesIngestModuleFactory.moduleName, + "Big and Round Files"))).getAnalysisResult() - art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT) + try: + # post the artifact for listeners of artifact events + blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName) + except Blackboard.BlackboardException as e: + self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName()) - att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), - - FindBigRoundFilesIngestModuleFactory.moduleName, "Big and Round Files") - - art.addAttribute(att) - - - - # Fire an event to notify the UI and others that there is a new artifact - - IngestServices.getInstance().fireModuleDataEvent( - - ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName, - - BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None)) - - - - return IngestModule.ProcessResult.OK\endverbatim + return IngestModule.ProcessResult.OK\endverbatim Save this file and run the module on some of your data. If you have any big and round files, you should see an entry under the "Interesting Items" node in the tree. \image html bigAndRoundFiles.png +The full big and round file module along with test data can be found on github. + \subsection python_tutorial1_debug Debugging and Development Tips Whenever you have syntax errors or other errors in your script, you will get some form of dialog from Autopsy when you try to run ingest modules. If that happens, fix the problem and run ingest modules again. You don't need to restart Autopsy each time! 
diff --git a/docs/doxygen/modReportModuleTutorial.dox b/docs/doxygen/modReportModuleTutorial.dox index ed2b184cf0..d620076df5 100644 --- a/docs/doxygen/modReportModuleTutorial.dox +++ b/docs/doxygen/modReportModuleTutorial.dox @@ -45,7 +45,7 @@ A third approach is to call org.sleuthkit.autopsy.casemodule.Case.getDataSources \subsubsection python_tutorial3_getting_artifacts Getting Blackboard Artifacts -The blackboard is where modules store their analysis results. If you want to include them in your report, then there are several methods that you could use. If you want all artifacts of a given type, then you can use SleuthkitCase.getBlackboardArtifacts(). There are many variations of this method that take different arguments. Look at them to find the one that is most convenient for you. +The blackboard is where modules store their analysis results. If you want to include them in your report, then there are several methods that you could use. If you want all artifacts of a given type, then you can use getDataArtifacts()or Blackboard.getAnalysisResultsByType(). There are variations of these methods that take different arguments. Look at them to find the one that is most convenient for you. \subsubsection python_tutorial3_getting_tags Getting Tagged Files or Artifacts diff --git a/test/script/regression.py b/test/script/regression.py index 1b42f623bc..e898bec3e5 100644 --- a/test/script/regression.py +++ b/test/script/regression.py @@ -16,18 +16,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import sys +import subprocess +import os + +# import db_diff +sys.path.insert(0, os.path.join(subprocess.getoutput("cygpath -u $TSK_HOME"), "db_diff")) from tskdbdiff import TskDbDiff, TskDbDiffException, PGSettings import codecs import datetime import logging -import os + import re import shutil import socket import sqlite3 -import subprocess -import sys from sys import platform as _platform import time import traceback diff --git a/test/script/tskdbdiff.py b/test/script/tskdbdiff.py deleted file mode 100644 index f0aae2c2a5..0000000000 --- a/test/script/tskdbdiff.py +++ /dev/null @@ -1,1200 +0,0 @@ -# Requires python3 - -import re -import sqlite3 -import subprocess -import shutil -import os -import codecs -import datetime -import sys -from typing import Callable, Dict, Union, List - -import psycopg2 -import psycopg2.extras -import socket -import csv - -class TskDbDiff(object): - """Compares two TSK/Autospy SQLite databases. - - Attributes: - gold_artifacts: - autopsy_artifacts: - gold_attributes: - autopsy_attributes: - gold_objects: - autopsy_objects: - artifact_comparison: - attribute_comparision: - report_errors: a listof_listof_String, the error messages that will be - printed to screen in the run_diff method - passed: a boolean, did the diff pass? - autopsy_db_file: - gold_db_file: - """ - def __init__(self, output_db, gold_db, output_dir=None, gold_bb_dump=None, gold_dump=None, verbose=False, isMultiUser=False, pgSettings=None): - """Constructor for TskDbDiff. - - Args: - output_db_path: path to output database (non-gold standard) - gold_db_path: path to gold database - output_dir: (optional) Path to folder where generated files will be put. - gold_bb_dump: (optional) path to file where the gold blackboard dump is located - gold_dump: (optional) path to file where the gold non-blackboard dump is located - verbose: (optional) a boolean, if true, diff results are sent to stdout. 
- """ - - self.output_db_file = output_db - self.gold_db_file = gold_db - self.output_dir = output_dir - self.gold_bb_dump = gold_bb_dump - self.gold_dump = gold_dump - self._generate_gold_dump = False - self._generate_gold_bb_dump = False - self._bb_dump_diff = "" - self._dump_diff = "" - self._bb_dump = "" - self._dump = "" - self.verbose = verbose - self.isMultiUser = isMultiUser - self.pgSettings = pgSettings - - if self.isMultiUser and not self.pgSettings: - print("Missing PostgreSQL database connection settings data.") - sys.exit(1) - - if self.gold_bb_dump is None: - self._generate_gold_bb_dump = True - if self.gold_dump is None: - self._generate_gold_dump = True - - def run_diff(self): - """Compare the databases. - - Raises: - TskDbDiffException: if an error occurs while diffing or dumping the database - """ - - self._init_diff() - id_obj_path_table = -1 - # generate the gold database dumps if necessary - if self._generate_gold_dump: - id_obj_path_table = TskDbDiff._dump_output_db_nonbb(self.gold_db_file, self.gold_dump, self.isMultiUser, self.pgSettings) - if self._generate_gold_bb_dump: - TskDbDiff._dump_output_db_bb(self.gold_db_file, self.gold_bb_dump, self.isMultiUser, self.pgSettings, id_obj_path_table) - - # generate the output database dumps (both DB and BB) - id_obj_path_table = TskDbDiff._dump_output_db_nonbb(self.output_db_file, self._dump, self.isMultiUser, self.pgSettings) - TskDbDiff._dump_output_db_bb(self.output_db_file, self._bb_dump, self.isMultiUser, self.pgSettings, id_obj_path_table) - - # Compare non-BB - dump_diff_pass = self._diff(self._dump, self.gold_dump, self._dump_diff) - - # Compare BB - bb_dump_diff_pass = self._diff(self._bb_dump, self.gold_bb_dump, self._bb_dump_diff) - - self._cleanup_diff() - return dump_diff_pass, bb_dump_diff_pass - - - def _init_diff(self): - """Set up the necessary files based on the arguments given at construction""" - if self.output_dir is None: - # No stored files - self._bb_dump = 
TskDbDiff._get_tmp_file("BlackboardDump", ".txt") - self._bb_dump_diff = TskDbDiff._get_tmp_file("BlackboardDump-Diff", ".txt") - self._dump = TskDbDiff._get_tmp_file("DBDump", ".txt") - self._dump_diff = TskDbDiff._get_tmp_file("DBDump-Diff", ".txt") - else: - self._bb_dump = os.path.join(self.output_dir, "BlackboardDump.txt") - self._bb_dump_diff = os.path.join(self.output_dir, "BlackboardDump-Diff.txt") - self._dump = os.path.join(self.output_dir, "DBDump.txt") - self._dump_diff = os.path.join(self.output_dir, "DBDump-Diff.txt") - - # Sorting gold before comparing (sort behaves differently in different environments) - new_bb = TskDbDiff._get_tmp_file("GoldBlackboardDump", ".txt") - new_db = TskDbDiff._get_tmp_file("GoldDBDump", ".txt") - if self.gold_bb_dump is not None: - srtcmdlst = ["sort", self.gold_bb_dump, "-o", new_bb] - subprocess.call(srtcmdlst) - srtcmdlst = ["sort", self.gold_dump, "-o", new_db] - subprocess.call(srtcmdlst) - self.gold_bb_dump = new_bb - self.gold_dump = new_db - - - def _cleanup_diff(self): - if self.output_dir is None: - #cleanup temp files - os.remove(self._dump) - os.remove(self._bb_dump) - if os.path.isfile(self._dump_diff): - os.remove(self._dump_diff) - if os.path.isfile(self._bb_dump_diff): - os.remove(self._bb_dump_diff) - - if self.gold_bb_dump is None: - os.remove(self.gold_bb_dump) - os.remove(self.gold_dump) - - - def _diff(self, output_file, gold_file, diff_path): - """Compare two text files. 
- - Args: - output_file: a pathto_File, the latest text file - gold_file: a pathto_File, the gold text file - diff_path: The file to write the differences to - Returns False if different - """ - - if (not os.path.isfile(output_file)): - return False - - if (not os.path.isfile(gold_file)): - return False - - # It is faster to read the contents in and directly compare - output_data = codecs.open(output_file, "r", "utf_8").read() - gold_data = codecs.open(gold_file, "r", "utf_8").read() - if (gold_data == output_data): - return True - - # If they are different, invoke 'diff' - diff_file = codecs.open(diff_path, "wb", "utf_8") - # Gold needs to be passed in as 1st arg and output as 2nd - dffcmdlst = ["diff", gold_file, output_file] - subprocess.call(dffcmdlst, stdout = diff_file) - - # create file path for gold files inside output folder. In case of diff, both gold and current run files - # are available in the report output folder. Prefix Gold- is added to the filename. - gold_file_in_output_dir = os.path.join(os.path.dirname(output_file), "Gold-" + os.path.basename(output_file)) - shutil.copy(gold_file, gold_file_in_output_dir) - - return False - - - @staticmethod - def _get_associated_artifact_type(cur, artifact_id, isMultiUser): - if isMultiUser: - cur.execute( - "SELECT tsk_files.parent_path, blackboard_artifact_types.display_name FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id WHERE artifact_id=%s", - [artifact_id]) - else: - cur.execute( - "SELECT tsk_files.parent_path, blackboard_artifact_types.display_name FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id WHERE artifact_id=?", - [artifact_id]) - - info = cur.fetchone() - - 
return "File path: " + info[0] + " Artifact Type: " + info[1] - - - @staticmethod - def _dump_output_db_bb(db_file, bb_dump_file, isMultiUser, pgSettings, id_obj_path_table): - """Dumps sorted text results to the given output location. - - Smart method that deals with a blackboard comparison to avoid issues - with different IDs based on when artifacts were created. - - Args: - db_file: a pathto_File, the output database. - bb_dump_file: a pathto_File, the sorted dump file to write to - """ - - unsorted_dump = TskDbDiff._get_tmp_file("dump_data", ".txt") - if isMultiUser: - conn, unused_db = db_connect(db_file, isMultiUser, pgSettings) - artifact_cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) - else: # Use Sqlite - conn = sqlite3.connect(db_file) - conn.text_factory = lambda x: x.decode("utf-8", "ignore") - conn.row_factory = sqlite3.Row - artifact_cursor = conn.cursor() - # Get the list of all artifacts (along with type and associated file) - # @@@ Could add a SORT by parent_path in here since that is how we are going to later sort it. 
- artifact_cursor.execute("SELECT tsk_files.parent_path, tsk_files.name, blackboard_artifact_types.display_name, blackboard_artifacts.artifact_id FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id") - database_log = codecs.open(unsorted_dump, "wb", "utf_8") - row = artifact_cursor.fetchone() - appnd = False - counter = 0 - artifact_count = 0 - artifact_fail = 0 - - # Cycle through artifacts - try: - while (row != None): - - # File Name and artifact type - # Remove parent object ID from Unalloc file name - normalizedName = re.sub('^Unalloc_[0-9]+_', 'Unalloc_', row["name"]) - if(row["parent_path"] != None): - database_log.write(row["parent_path"] + normalizedName + ' ') - else: - database_log.write(normalizedName + ' ') - - if isMultiUser: - attribute_cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) - else: - attribute_cursor = conn.cursor() - looptry = True - artifact_count += 1 - try: - art_id = "" - art_id = str(row["artifact_id"]) - - # Get attributes for this artifact - if isMultiUser: - attribute_cursor.execute("SELECT blackboard_attributes.source, blackboard_attributes.attribute_type_id, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double FROM blackboard_attributes INNER JOIN blackboard_attribute_types ON blackboard_attributes.attribute_type_id = blackboard_attribute_types.attribute_type_id WHERE artifact_id = %s ORDER BY blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double", [art_id]) - else: - attribute_cursor.execute("SELECT 
blackboard_attributes.source, blackboard_attributes.attribute_type_id, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double FROM blackboard_attributes INNER JOIN blackboard_attribute_types ON blackboard_attributes.attribute_type_id = blackboard_attribute_types.attribute_type_id WHERE artifact_id =? ORDER BY blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double", [art_id]) - - attributes = attribute_cursor.fetchall() - - # Print attributes - if (len(attributes) == 0): - # @@@@ This should be - database_log.write(' \n') - row = artifact_cursor.fetchone() - continue - - src = attributes[0][0] - for attr in attributes: - numvals = 0 - for x in range(3, 6): - if(attr[x] != None): - numvals += 1 - if(numvals > 1): - msg = "There were too many values for attribute type: " + attr["display_name"] + " for artifact with id #" + str(row["artifact_id"]) + ".\n" - - if(not attr["source"] == src): - msg = "There were inconsistent sources for artifact with id #" + str(row["artifact_id"]) + ".\n" - - try: - if attr["value_type"] == 0: - attr_value_as_string = str(attr["value_text"]) - elif attr["value_type"] == 1: - attr_value_as_string = str(attr["value_int32"]) - elif attr["value_type"] == 2: - attr_value_as_string = str(attr["value_int64"]) - if attr["attribute_type_id"] == 36 and id_obj_path_table != -1 and int(attr_value_as_string) > 0: #normalize positive TSK_PATH_IDs from being object id to a path if the obj_id_path_table was generated - attr_value_as_string = id_obj_path_table[int(attr_value_as_string)] - elif attr["value_type"] == 3: - attr_value_as_string = "%20.10f" % float((attr["value_double"])) #use exact format from db 
schema to avoid python auto format double value to (0E-10) scientific style - elif attr["value_type"] == 4: - attr_value_as_string = "bytes" - elif attr["value_type"] == 5: - attr_value_as_string = str(attr["value_int64"]) - if attr["display_name"] == "Associated Artifact": - attr_value_as_string = TskDbDiff._get_associated_artifact_type(attribute_cursor, attr_value_as_string, isMultiUser) - patrn = re.compile("[\n\0\a\b\r\f]") - attr_value_as_string = re.sub(patrn, ' ', attr_value_as_string) - if attr["source"] == "Keyword Search" and attr["display_name"] == "Keyword Preview": - attr_value_as_string = "" - database_log.write('') - except IOError as e: - print("IO error") - raise TskDbDiffException("Unexpected IO error while writing to database log." + str(e)) - - except sqlite3.Error as e: - msg = "Attributes in artifact id (in output DB)# " + str(row["artifact_id"]) + " encountered an error: " + str(e) +" .\n" - print("Attributes in artifact id (in output DB)# ", str(row["artifact_id"]), " encountered an error: ", str(e)) - print() - looptry = False - artifact_fail += 1 - database_log.write('Error Extracting Attributes') - database_log.close() - raise TskDbDiffException(msg) - finally: - attribute_cursor.close() - - - # @@@@ This should be - database_log.write(' \n') - row = artifact_cursor.fetchone() - - if(artifact_fail > 0): - msg ="There were " + str(artifact_count) + " artifacts and " + str(artifact_fail) + " threw an exception while loading.\n" - except Exception as e: - raise TskDbDiffException("Unexpected error while dumping blackboard database: " + str(e)) - finally: - database_log.close() - artifact_cursor.close() - conn.close() - - # Now sort the file - srtcmdlst = ["sort", unsorted_dump, "-o", bb_dump_file] - subprocess.call(srtcmdlst) - - @staticmethod - def _dump_output_db_nonbb(db_file, dump_file, isMultiUser, pgSettings): - """Dumps a database to a text file. - - Does not dump the artifact and attributes. 
- - Args: - db_file: a pathto_File, the database file to dump - dump_file: a pathto_File, the location to dump the non-blackboard database items - """ - - conn, backup_db_file = db_connect(db_file, isMultiUser, pgSettings) - guid_utils = TskGuidUtils.create(conn) - - if isMultiUser: - table_cols = get_pg_table_columns(conn) - schema = get_pg_schema(db_file, pgSettings.username, pgSettings.password, - pgSettings.pgHost, pgSettings.pgPort) - else: - table_cols = get_sqlite_table_columns(conn) - schema = get_sqlite_schema(conn) - - with codecs.open(dump_file, "wb", "utf_8") as output_file: - output_file.write(schema + "\n") - for table, cols in sorted(table_cols.items(), key=lambda pr: pr[0]): - normalizer = TABLE_NORMALIZATIONS[table] if table in TABLE_NORMALIZATIONS else None - write_normalized(guid_utils, output_file, conn, table, cols, normalizer) - - # Now sort the file - srtcmdlst = ["sort", dump_file, "-o", dump_file] - subprocess.call(srtcmdlst) - - conn.close() - # cleanup the backup - # if backup_db_file: - # os.remove(backup_db_file) - return guid_utils.obj_id_guids - - @staticmethod - def dump_output_db(db_file, dump_file, bb_dump_file, isMultiUser, pgSettings): - """Dumps the given database to text files for later comparison. 
- - Args: - db_file: a pathto_File, the database file to dump - dump_file: a pathto_File, the location to dump the non-blackboard database items - bb_dump_file: a pathto_File, the location to dump the blackboard database items - """ - id_obj_path_table = TskDbDiff._dump_output_db_nonbb(db_file, dump_file, isMultiUser, pgSettings) - TskDbDiff._dump_output_db_bb(db_file, bb_dump_file, isMultiUser, pgSettings, id_obj_path_table) - - @staticmethod - def _get_tmp_file(base, ext): - time = datetime.datetime.now().time().strftime("%H%M%f") - return os.path.join(os.environ['TMP'], base + time + ext) - - -class TskDbDiffException(Exception): - pass - -class PGSettings(object): - def __init__(self, pgHost=None, pgPort=5432, user=None, password=None): - self.pgHost = pgHost - self.pgPort = pgPort - self.username = user - self.password = password - - def get_pgHost(self): - return self.pgHost - - def get_pgPort(self): - return self.pgPort - - def get_username(self): - return self.username - - def get_password(self): - return self.password - - -class TskGuidUtils: - """ - This class provides guids for potentially volatile data. - """ - - @staticmethod - def _get_guid_dict(db_conn, select_statement, delim="", normalizer: Union[Callable[[str], str], None] = None): - """ - Retrieves a dictionary mapping the first item selected to a concatenation of the remaining values. - Args: - db_conn: The database connection. - select_statement: The select statement. - delim: The delimiter for how row data from index 1 to end shall be concatenated. - normalizer: Means of normalizing the generated string or None. - - Returns: A dictionary mapping the key (the first item in the select statement) to a concatenation of the remaining values. - - """ - cursor = db_conn.cursor() - cursor.execute(select_statement) - ret_dict = {} - for row in cursor: - # concatenate value rows with delimiter filtering out any null values. 
- value_str = delim.join([str(col) for col in filter(lambda col: col is not None, row[1:])]) - if normalizer: - value_str = normalizer(value_str) - ret_dict[row[0]] = value_str - - return ret_dict - - @staticmethod - def create(db_conn): - """ - Creates an instance of this class by querying for relevant guid data. - Args: - db_conn: The database connection. - - Returns: The instance of this class. - - """ - guid_files = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, parent_path, name FROM tsk_files", - normalizer=normalize_file_path) - guid_vs_parts = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, addr, start FROM tsk_vs_parts", "_") - guid_vs_info = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, vs_type, img_offset FROM tsk_vs_info", "_") - guid_fs_info = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, img_offset, fs_type FROM tsk_fs_info", "_") - guid_image_names = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, name FROM tsk_image_names " - "WHERE sequence=0", - normalizer=get_filename) - guid_os_accounts = TskGuidUtils._get_guid_dict(db_conn, "SELECT os_account_obj_id, addr FROM tsk_os_accounts") - guid_reports = TskGuidUtils._get_guid_dict(db_conn, "SELECT obj_id, path FROM reports", - normalizer=normalize_file_path) - - objid_artifacts = TskGuidUtils._get_guid_dict(db_conn, - "SELECT blackboard_artifacts.artifact_obj_id, " - "blackboard_artifact_types.type_name " - "FROM blackboard_artifacts " - "INNER JOIN blackboard_artifact_types " - "ON blackboard_artifact_types.artifact_type_id = " - "blackboard_artifacts.artifact_type_id") - - artifact_objid_artifacts = TskGuidUtils._get_guid_dict(db_conn, - "SELECT blackboard_artifacts.artifact_id, " - "blackboard_artifact_types.type_name " - "FROM blackboard_artifacts " - "INNER JOIN blackboard_artifact_types " - "ON blackboard_artifact_types.artifact_type_id = " - "blackboard_artifacts.artifact_type_id") - - cursor = db_conn.cursor() - cursor.execute("SELECT obj_id, par_obj_id FROM 
tsk_objects") - par_obj_objects = dict([(row[0], row[1]) for row in cursor]) - - guid_artifacts = {} - for k, v in objid_artifacts.items(): - if k in par_obj_objects: - par_obj_id = par_obj_objects[k] - - # check for artifact parent in files, images, reports - path = '' - for artifact_parent_dict in [guid_files, guid_image_names, guid_reports]: - if par_obj_id in artifact_parent_dict: - path = artifact_parent_dict[par_obj_id] - break - - guid_artifacts[k] = "/".join([path, v]) - - return TskGuidUtils( - # aggregate all the object id dictionaries together - obj_id_guids={**guid_files, **guid_reports, **guid_os_accounts, **guid_vs_parts, **guid_vs_info, - **guid_fs_info, **guid_fs_info, **guid_image_names, **guid_artifacts}, - artifact_types=artifact_objid_artifacts) - - artifact_types: Dict[int, str] - obj_id_guids: Dict[int, any] - - def __init__(self, obj_id_guids: Dict[int, any], artifact_types: Dict[int, str]): - """ - Main constructor. - Args: - obj_id_guids: A dictionary mapping object ids to their guids. - artifact_types: A dictionary mapping artifact ids to their types. - """ - self.artifact_types = artifact_types - self.obj_id_guids = obj_id_guids - - def get_guid_for_objid(self, obj_id, omitted_value: Union[str, None] = 'Object ID Omitted'): - """ - Returns the guid for the specified object id or returns omitted value if the object id is not found. - Args: - obj_id: The object id. - omitted_value: The value if no object id mapping is found. - - Returns: The relevant guid or the omitted_value. 
- - """ - return self.obj_id_guids[obj_id] if obj_id in self.obj_id_guids else omitted_value - - def get_guid_for_file_objid(self, obj_id, omitted_value: Union[str, None] = 'Object ID Omitted'): - # this method is just an alias for get_guid_for_objid - return self.get_guid_for_objid(obj_id, omitted_value) - - def get_guid_for_accountid(self, account_id, omitted_value: Union[str, None] = 'Account ID Omitted'): - # this method is just an alias for get_guid_for_objid - return self.get_guid_for_objid(account_id, omitted_value) - - def get_guid_for_artifactid(self, artifact_id, omitted_value: Union[str, None] = 'Artifact ID Omitted'): - """ - Returns the guid for the specified artifact id or returns omitted value if the artifact id is not found. - Args: - artifact_id: The artifact id. - omitted_value: The value if no object id mapping is found. - - Returns: The relevant guid or the omitted_value. - """ - return self.artifact_types[artifact_id] if artifact_id in self.artifact_types else omitted_value - - -class NormalizeRow: - """ - Given a dictionary representing a row (i.e. column name mapped to value), returns a normalized representation of - that row such that the values should be less volatile from run to run. - """ - row_masker: Callable[[TskGuidUtils, Dict[str, any]], Dict[str, any]] - - def __init__(self, row_masker: Callable[[TskGuidUtils, Dict[str, any]], Union[Dict[str, any], None]]): - """ - Main constructor. - Args: - row_masker: The function to be called to mask the specified row. - """ - self.row_masker = row_masker - - def normalize(self, guid_util: TskGuidUtils, row: Dict[str, any]) -> Union[Dict[str, any], None]: - """ - Normalizes a row such that the values should be less volatile from run to run. - Args: - guid_util: The TskGuidUtils instance providing guids for volatile ids. - row: The row values mapping column name to value. - - Returns: The normalized row or None if the row should be ignored. 
- - """ - return self.row_masker(guid_util, row) - - -class NormalizeColumns(NormalizeRow): - """ - Utility for normalizing specific column values of a row so they are not volatile values that will change from run - to run. - """ - - @classmethod - def _normalize_col_vals(cls, - col_mask: Dict[str, Union[any, Callable[[TskGuidUtils, any], any]]], - guid_util: TskGuidUtils, - row: Dict[str, any]): - """ - Normalizes column values for each column rule provided. - Args: - col_mask: A dictionary mapping columns to either the replacement value or a function to retrieve the - replacement value given the TskGuidUtils instance and original value as arguments. - guid_util: The TskGuidUtil used to provide guids for volatile values. - row: The dictionary representing the row mapping column names to values. - - Returns: The new row representation. - - """ - row_copy = row.copy() - for key, val in col_mask.items(): - # only replace values if present in row - if key in row_copy: - # if a column replacing function, call with original value - if isinstance(val, Callable): - row_copy[key] = val(guid_util, row[key]) - # otherwise, just replace with mask value - else: - row_copy[key] = val - - return row_copy - - def __init__(self, col_mask: Dict[str, Union[any, Callable[[any], any]]]): - super().__init__(lambda guid_util, row: NormalizeColumns._normalize_col_vals(col_mask, guid_util, row)) - - -def get_path_segs(path: Union[str, None]) -> Union[List[str], None]: - """ - Breaks a path string into its folders and filenames. - Args: - path: The path string or None. - - Returns: The path segments or None. - - """ - if path: - # split on backslash or forward slash - return list(filter(lambda x: len(x.strip()) > 0, [s for s in re.split(r"[\\/]", path)])) - else: - return None - - -def get_filename(path: Union[str, None]) -> Union[str, None]: - """ - Returns the last segment of a file path. - Args: - path: The path. 
- - Returns: The last segment of the path - - """ - path_segs = get_path_segs(path) - if path_segs is not None and len(path_segs) > 0: - return path_segs[-1] - else: - return None - - -def index_of(lst, search_item) -> int: - """ - Returns the index of the item in the list or -1. - Args: - lst: The list. - search_item: The item to search for. - - Returns: The index in the list of the item or -1. - - """ - for idx, item in enumerate(lst): - if item == search_item: - return idx - - return -1 - - -def get_sql_insert_value(val) -> str: - """ - Returns the value that would appear in a sql insert statement (i.e. string becomes 'string', None becomes NULL) - Args: - val: The original value. - - Returns: The sql insert equivalent value. - - """ - if val is None: - return "NULL" - - if isinstance(val, str): - escaped_val = val.replace('\n', '\\n').replace("'", "''") - return f"'{escaped_val}'" - - return str(val) - - -def get_sqlite_table_columns(conn) -> Dict[str, List[str]]: - """ - Retrieves a dictionary mapping table names to a list of all the columns for that table - where the columns are in ordinal value. - Args: - conn: The database connection. - - Returns: A dictionary of the form { table_name: [col_name1, col_name2...col_nameN] } - - """ - cur = conn.cursor() - cur.execute("SELECT name FROM sqlite_master tables WHERE tables.type='table'") - tables = list([table[0] for table in cur.fetchall()]) - cur.close() - - to_ret = {} - for table in tables: - cur = conn.cursor() - cur.execute('SELECT name FROM pragma_table_info(?) ORDER BY cid', [table]) - to_ret[table] = list([col[0] for col in cur.fetchall()]) - - return to_ret - - -def get_pg_table_columns(conn) -> Dict[str, List[str]]: - """ - Returns a dictionary mapping table names to the list of their columns in ordinal order. - Args: - conn: The pg database connection. - - Returns: The dictionary of tables mapped to a list of their ordinal-orderd column names. 
- """ - cursor = conn.cursor() - cursor.execute(""" - SELECT cols.table_name, cols.column_name - FROM information_schema.columns cols - WHERE cols.column_name IS NOT NULL - AND cols.table_name IS NOT NULL - AND cols.table_name IN ( - SELECT tables.tablename FROM pg_catalog.pg_tables tables - WHERE LOWER(schemaname) = 'public' - ) - ORDER by cols.table_name, cols.ordinal_position; - """) - mapping = {} - for row in cursor: - mapping.setdefault(row[0], []).append(row[1]) - - cursor.close() - return mapping - - -def sanitize_schema(original: str) -> str: - """ - Sanitizes sql script representing table/index creations. - Args: - original: The original sql schema creation script. - - Returns: The sanitized schema. - """ - sanitized_lines = [] - dump_line = '' - for line in original.splitlines(): - line = line.strip('\r\n ') - lower_line = line.lower() - # It's comment or alter statement or catalog entry or set idle entry or empty line - if (not line or - line.startswith('--') or - lower_line.startswith('set') or - " set default nextval" in lower_line or - " owner to " in lower_line or - " owned by " in lower_line or - "pg_catalog" in lower_line or - "idle_in_transaction_session_timeout" in lower_line): - continue - - # if there is no white space or parenthesis delimiter, add a space - if re.match(r'^.+?[^\s()]$', dump_line) and re.match(r'^[^\s()]', line): - dump_line += ' ' - - # append the line to the outputted line - dump_line += line - - # if line ends with ';' then this will be one statement in diff - if line.endswith(';'): - sanitized_lines.append(dump_line) - dump_line = '' - - if len(dump_line.strip()) > 0: - sanitized_lines.append(dump_line) - - return "\n".join(sanitized_lines) - - -def get_pg_schema(dbname: str, pg_username: str, pg_pword: str, pg_host: str, pg_port: Union[str, int]): - """ - Gets the schema to be added to the dump text from the postgres database. - Args: - dbname: The name of the database. - pg_username: The postgres user name. 
- pg_pword: The postgres password. - pg_host: The postgres host. - pg_port: The postgres port. - - Returns: The normalized schema. - - """ - os.environ['PGPASSWORD'] = pg_pword - pg_dump = ["pg_dump", "-U", pg_username, "-h", pg_host, "-p", str(pg_port), - "--schema-only", "-d", dbname, "-t", "public.*"] - output = subprocess.check_output(pg_dump) - output_str = output.decode('UTF-8') - return sanitize_schema(output_str) - - -def get_sqlite_schema(db_conn): - """ - Gets the schema to be added to the dump text from the sqlite database. - Args: - db_conn: The database connection. - - Returns: The normalized schema. - - """ - cursor = db_conn.cursor() - query = "SELECT sql FROM sqlite_master " \ - "WHERE type IN ('table', 'index') AND sql IS NOT NULL " \ - "ORDER BY type DESC, tbl_name ASC" - - cursor.execute(query) - schema = '\n'.join([str(row[0]) + ';' for row in cursor]) - return sanitize_schema(schema) - - -def _mask_event_desc(desc: str) -> str: - """ - Masks dynamic event descriptions of the form ":" so the artifact id is no longer - present. - Args: - desc: The original description. - - Returns: The normalized description. - - """ - - # Takes a string like "Shell Bags: 30840" and replaces with "ShellBags:" - match = re.search(r"^\s*(.+?)\s*:\s*\d+\s*$", desc.strip()) - if match: - return f"{match.group(1)}:" - - return desc - - -def normalize_tsk_event_descriptions(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: - """ - Normalizes event description rows masking possibly changing column values. - Args: - guid_util: Provides guids for ids that may change from run to run. - row: A dictionary mapping column names to values. - - Returns: The normalized event description row. 
- """ - row_copy = row.copy() - # replace object ids with information that is deterministic - row_copy['event_description_id'] = MASKED_ID - row_copy['content_obj_id'] = guid_util.get_guid_for_file_objid(row['content_obj_id']) - row_copy['artifact_id'] = guid_util.get_guid_for_artifactid(row['artifact_id']) \ - if row['artifact_id'] is not None else None - row_copy['data_source_obj_id'] = guid_util.get_guid_for_file_objid(row['data_source_obj_id']) - - if row['full_description'] == row['med_description'] == row['short_description']: - row_copy['full_description'] = _mask_event_desc(row['full_description']) - row_copy['med_description'] = _mask_event_desc(row['med_description']) - row_copy['short_description'] = _mask_event_desc(row['short_description']) - - return row_copy - - -def normalize_ingest_jobs(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: - """ - Normalizes ingest jobs table rows. - Args: - guid_util: Provides guids for ids that may change from run to run. - row: A dictionary mapping column names to values. - - Returns: The normalized ingest job row. - - """ - row_copy = row.copy() - row_copy['host_name'] = "{host_name}" - - start_time = row['start_date_time'] - end_time = row['end_date_time'] - if start_time <= end_time: - row_copy['start_date_time'] = MASKED_TIME - row_copy['end_date_time'] = MASKED_TIME - - return row_copy - - -def normalize_unalloc_files(path_str: Union[str, None]) -> Union[str, None]: - """ - Normalizes a path string removing timestamps from unalloc files. - Args: - path_str: The original path string. - - Returns: The path string where timestamps are removed from unalloc strings. 
- - """ - # takes a file name like "Unalloc_30580_7466496_2980941312" and removes the object id to become - # "Unalloc_7466496_2980941312" - return None if path_str is None else re.sub('Unalloc_[0-9]+_', 'Unalloc_', path_str) - - -def normalize_regripper_files(path_str: Union[str, None]) -> Union[str, None]: - """ - Normalizes a path string removing timestamps from regripper files. - Args: - path_str: The original path string. - - Returns: The path string where timestamps are removed from regripper paths. - - """ - # takes a file name like "regripper-12345-full" and removes the id to become "regripper-full" - return None if path_str is None else re.sub(r'regripper-[0-9]+-full', 'regripper-full', path_str) - - -def normalize_file_path(path_str: Union[str, None]) -> Union[str, None]: - """ - Normalizes file paths removing or replacing pieces that will change from run to run (i.e. object id) - Args: - path_str: The original path string. - - Returns: The normalized path string - """ - return normalize_unalloc_files(normalize_regripper_files(path_str)) - - -def normalize_tsk_files(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: - """ - Normalizes files table rows. - Args: - guid_util: Provides guids for ids that may change from run to run. - row: A dictionary mapping column names to values. - - Returns: The normalized files table row. - - """ - # Ignore TIFF size and hash if extracted from PDFs. - # See JIRA-6951 for more details. 
- row_copy = row.copy() - if row['extension'] is not None and row['extension'].strip().lower() == 'tif' and \ - row['parent_path'] is not None and row['parent_path'].strip().lower().endswith('.pdf/'): - row_copy['size'] = "SIZE_IGNORED" - row_copy['md5'] = "MD5_IGNORED" - row_copy['sha256'] = "SHA256_IGNORED" - - row_copy['data_source_obj_id'] = guid_util.get_guid_for_file_objid(row['data_source_obj_id']) - row_copy['obj_id'] = MASKED_OBJ_ID - row_copy['os_account_obj_id'] = 'MASKED_OS_ACCOUNT_OBJ_ID' - row_copy['parent_path'] = normalize_file_path(row['parent_path']) - row_copy['name'] = normalize_file_path(row['name']) - return row_copy - - -def normalize_tsk_files_path(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: - """ - Normalizes file path table rows. - Args: - guid_util: Provides guids for ids that may change from run to run. - row: A dictionary mapping column names to values. - - Returns: The normalized file path table row. - """ - row_copy = row.copy() - path = row['path'] - if path is not None: - path_parts = get_path_segs(path) - module_output_idx = index_of(path_parts, 'ModuleOutput') - if module_output_idx >= 0: - # remove everything up to and including ModuleOutput if ModuleOutput present - path_parts = path_parts[module_output_idx:] - if len(path_parts) > 2 and path_parts[1] == 'EFE': - # for embedded file extractor, the next folder is the object id and should be omitted - del path_parts[2] - - row_copy['path'] = os.path.join(*path_parts) if len(path_parts) > 0 else '/' - - row_copy['obj_id'] = guid_util.get_guid_for_file_objid(row['obj_id']) - return row_copy - - -def normalize_tsk_objects_path(guid_util: TskGuidUtils, objid: int, - no_path_placeholder: Union[str, None]) -> Union[str, None]: - """ - Returns a normalized path to be used in a tsk_objects table row. - Args: - guid_util: The utility for fetching guids. - objid: The object id of the item. - no_path_placeholder: text to return if no path value found. 
- - Returns: The 'no_path_placeholder' text if no path. Otherwise, the normalized path. - - """ - path = guid_util.get_guid_for_objid(objid, omitted_value=None) - - if path is None: - return no_path_placeholder - else: - # remove host name (for multi-user) and dates/times from path for reports - path_parts = get_path_segs(path) - module_output_idx = index_of(path_parts, 'ModuleOutput') - if module_output_idx >= 0: - # remove everything up to and including ModuleOutput if ModuleOutput present - path_parts = path_parts[module_output_idx:] - - if "BulkExtractor" in path_parts or "Smirk" in path_parts: - # chop off the last folder (which contains a date/time) - path_parts = path_parts[:-1] - - if path_parts and len(path_parts) >= 2: - for idx in range(0, len(path_parts) - 1): - if path_parts[idx].lower() == "reports" and \ - path_parts[idx + 1].lower().startswith("autopsytestcase html report"): - path_parts = ["Reports", "AutopsyTestCase HTML Report"] - break - - path = os.path.join(*path_parts) if len(path_parts) > 0 else '/' - - return path - - -def normalize_tsk_objects(guid_util: TskGuidUtils, row: Dict[str, any]) -> Dict[str, any]: - """ - Normalizes object table rows. - Args: - guid_util: Provides guids for ids that may change from run to run. - row: A dictionary mapping column names to values. - - Returns: The normalized object table row. - """ - row_copy = row.copy() - row_copy['obj_id'] = None if row['obj_id'] is None else \ - normalize_tsk_objects_path(guid_util, row['obj_id'], MASKED_OBJ_ID) - - row_copy['par_obj_id'] = None if row['par_obj_id'] is None else \ - normalize_tsk_objects_path(guid_util, row['par_obj_id'], 'MASKED_PARENT_OBJ_ID') - - return row_copy - - -MASKED_TIME = "MASKED_TIME" -MASKED_OBJ_ID = "MASKED_OBJ_ID" -MASKED_ID = "MASKED_ID" - -IGNORE_TABLE = "IGNORE_TABLE" - -TableNormalization = Union[IGNORE_TABLE, NormalizeRow] - -""" -This dictionary maps tables where data should be specially handled to how they should be handled. 
-""" -TABLE_NORMALIZATIONS: Dict[str, TableNormalization] = { - "blackboard_artifacts": IGNORE_TABLE, - "blackboard_attributes": IGNORE_TABLE, - "data_source_info": NormalizeColumns({ - "device_id": "{device id}", - "added_date_time": "{dateTime}" - }), - "image_gallery_groups": NormalizeColumns({ - "group_id": MASKED_ID, - "data_source_obj_id": lambda guid_util, col: guid_util.get_guid_for_objid(col, omitted_value=None), - }), - "image_gallery_groups_seen": IGNORE_TABLE, - "ingest_jobs": NormalizeRow(normalize_ingest_jobs), - "reports": NormalizeColumns({ - "obj_id": MASKED_OBJ_ID, - "path": "AutopsyTestCase", - "crtime": MASKED_TIME - }), - "tsk_aggregate_score": NormalizeColumns({ - "obj_id": lambda guid_util, col: guid_util.get_guid_for_objid(col, omitted_value="Object ID Omitted"), - "data_source_obj_id": lambda guid_util, col: guid_util.get_guid_for_objid(col, omitted_value="Data Source Object ID Omitted"), - }), - "tsk_analysis_results": NormalizeColumns({ - "artifact_obj_id": - lambda guid_util, col: guid_util.get_guid_for_objid(col, omitted_value="Artifact Object ID Omitted"), - }), - "tsk_data_artifacts": NormalizeColumns({ - "artifact_obj_id": - lambda guid_util, col: guid_util.get_guid_for_file_objid(col, omitted_value="Artifact Object ID Omitted"), - "os_account_obj_id": - lambda guid_util, col: guid_util.get_guid_for_file_objid(col, omitted_value="Account Object ID Omitted"), - }), - "tsk_event_descriptions": NormalizeRow(normalize_tsk_event_descriptions), - "tsk_events": NormalizeColumns({ - "event_id": "MASKED_EVENT_ID", - "event_description_id": 'ID OMITTED' - }), - "tsk_examiners": NormalizeColumns({ - "login_name": "{examiner_name}" - }), - "tsk_files": NormalizeRow(normalize_tsk_files), - "tsk_file_layout": NormalizeColumns({ - "obj_id": lambda guid_util, col: guid_util.get_guid_for_file_objid(col) - }), - "tsk_files_path": NormalizeRow(normalize_tsk_files_path), - "tsk_image_names": NormalizeColumns({ - "name": lambda guid_util, col: 
get_filename(col) - }), - "tsk_objects": NormalizeRow(normalize_tsk_objects), - "tsk_os_account_attributes": NormalizeColumns({ - "id": MASKED_ID, - "os_account_obj_id": lambda guid_util, col: guid_util.get_guid_for_accountid(col), - "source_obj_id": lambda guid_util, col: guid_util.get_guid_for_objid(col) - }), - "tsk_os_account_instances": NormalizeColumns({ - "id": MASKED_ID, - "os_account_obj_id": lambda guid_util, col: guid_util.get_guid_for_accountid(col) - }), - "tsk_os_accounts": NormalizeColumns({ - "os_account_obj_id": MASKED_OBJ_ID - }), - "tsk_vs_parts": NormalizeColumns({ - "obj_id": MASKED_OBJ_ID - }) -} - - -def write_normalized(guid_utils: TskGuidUtils, output_file, db_conn, table: str, column_names: List[str], - normalizer: Union[TableNormalization, None] = None): - """ - Outputs rows of a file as their normalized values (where values should not change from run to run). - Args: - guid_utils: Provides guids to replace values that would potentially change from run to run. - output_file: The file where the normalized dump will be written. - db_conn: The database connection. - table: The name of the table. - column_names: The name of the columns in the table in ordinal order. - normalizer: The normalizer (if any) to use so that data is properly normalized. 
- """ - if normalizer == IGNORE_TABLE: - return - - cursor = db_conn.cursor() - - joined_columns = ",".join([col for col in column_names]) - cursor.execute(f"SELECT {joined_columns} FROM {table}") - for row in cursor: - if len(row) != len(column_names): - print( - f"ERROR: in {table}, number of columns retrieved: {len(row)} but columns are" - f" {len(column_names)} with {str(column_names)}") - continue - - row_dict = {} - for col_idx in range(0, len(column_names)): - row_dict[column_names[col_idx]] = row[col_idx] - - if normalizer and isinstance(normalizer, NormalizeRow): - row_masker: NormalizeRow = normalizer - row_dict = row_masker.normalize(guid_utils, row_dict) - - if row_dict is not None: - # show row as json-like value - entries = [] - for column in column_names: - dict_value = row_dict[column] if column in row_dict and row_dict[column] is not None else None - value = get_sql_insert_value(dict_value) - if value is not None: - entries.append((column, value)) - insert_values = ", ".join([f"{pr[0]}: {pr[1]}" for pr in entries]) - insert_statement = f"{table}: {{{insert_values}}}\n" - output_file.write(insert_statement) - - -def db_connect(db_file, is_multi_user, pg_settings=None): - if is_multi_user: # use PostgreSQL - try: - return psycopg2.connect("dbname=" + db_file + " user=" + pg_settings.username + " host=" + - pg_settings.pgHost + " password=" + pg_settings.password), None - except: - print("Failed to connect to the database: " + db_file) - else: # Sqlite - # Make a copy that we can modify - backup_db_file = TskDbDiff._get_tmp_file("tsk_backup_db", ".db") - shutil.copy(db_file, backup_db_file) - # We sometimes get situations with messed up permissions - os.chmod(backup_db_file, 0o777) - return sqlite3.connect(backup_db_file), backup_db_file - - -def main(): - try: - sys.argv.pop(0) - output_db = sys.argv.pop(0) - gold_db = sys.argv.pop(0) - except: - print("usage: tskdbdiff [OUTPUT DB PATH] [GOLD DB PATH]") - sys.exit(1) - - db_diff = 
TskDbDiff(output_db, gold_db, output_dir=".") - dump_passed, bb_dump_passed = db_diff.run_diff() - - if dump_passed and bb_dump_passed: - print("Database comparison passed.") - if not dump_passed: - print("Non blackboard database comparison failed.") - if not bb_dump_passed: - print("Blackboard database comparison failed.") - - sys.exit(0) - - -if __name__ == "__main__": - if sys.hexversion < 0x03000000: - print("Python 3 required") - sys.exit(1) - - main()