Merge branch 'new_table_load' of github.com:sleuthkit/autopsy into 8191-keywordSearch

Greg DiCristofaro 2021-12-16 11:36:20 -05:00
commit 53bf8cf2eb
58 changed files with 3515 additions and 2889 deletions

View File

@@ -35,6 +35,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
public class ViewArtifactAction extends AbstractAction {
private static final Logger logger = Logger.getLogger(ViewArtifactAction.class.getName());
private static final long serialVersionUID = 1L;
private final BlackboardArtifact artifact;
/**
@@ -50,12 +51,15 @@ public class ViewArtifactAction extends AbstractAction {
@Override
public void actionPerformed(ActionEvent e) {
// Moved this call outside the SwingWorker to prevent exceptions.
final DirectoryTreeTopComponent comp = DirectoryTreeTopComponent.findInstance();
WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
new SwingWorker<Void, Void>() {
@Override
protected Void doInBackground() throws Exception {
DirectoryTreeTopComponent.findInstance().viewArtifact(artifact);
comp.viewArtifact(artifact);
return null;
}
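
The comment in this hunk carries the reasoning for the change: DirectoryTreeTopComponent.findInstance() is now resolved on the EDT before the SwingWorker starts, presumably because the lookup is only safe on the event dispatch thread. A minimal sketch of the same pattern, using a hypothetical MyComponent stand-in:

import javax.swing.SwingWorker;

// Hypothetical stand-in for a component whose lookup is only safe on the EDT.
class MyComponent {
    static MyComponent findInstance() { return new MyComponent(); }
    void view() { /* render something */ }
}

class EdtCapturePattern {
    // Assumed to be called on the EDT, e.g. from an ActionListener.
    void run() {
        // Resolve the component first, while still on the EDT...
        final MyComponent comp = MyComponent.findInstance();
        new SwingWorker<Void, Void>() {
            @Override
            protected Void doInBackground() {
                comp.view(); // ...then use the captured reference off the EDT
                return null;
            }
        }.execute();
    }
}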

View File

@@ -72,9 +72,9 @@ DataContentViewerHex.totalPageLabel.text_1=100
DataContentViewerHex.pageLabel2.text=Page
# Product Information panel
LBL_Description=<div style="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif;">\n <b>Product Version:</b> {0} ({9}) <br><b>Sleuth Kit Version:</b> {7} <br><b>Netbeans RCP Build:</b> {8} <br> <b>Java:</b> {1}; {2}<br> <b>System:</b> {3}; {4}; {5}<br><b>Userdir:</b> {6}</div>
LBL_Description=<div style=\"font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif;\">\n <b>Product Version:</b> {0} ({9}) <br><b>Sleuth Kit Version:</b> {7} <br><b>Netbeans RCP Build:</b> {8} <br> <b>Java:</b> {1}; {2}<br> <b>System:</b> {3}; {4}; {5}<br><b>Userdir:</b> {6}</div>
Format_OperatingSystem_Value={0} version {1} running on {2}
LBL_Copyright=<div style="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif; ">Autopsy&trade; is a digital forensics platform based on The Sleuth Kit&trade; and other tools. <br><ul><li>General Information: <a style="color: #1E2A60;" href="http://www.sleuthkit.org">http://www.sleuthkit.org</a>.</li><li>Training: <a style="color: #1E2A60;" href="https://www.autopsy.com/support/training/">https://www.autopsy.com/support/training/</a></li><li>Support: <a style="color: #1E2A60;" href="https://www.sleuthkit.org/support.php">https://www.sleuthkit.org/support.php</a></li></ul>Copyright &copy; 2003-2020. </div>
LBL_Copyright=<div style\="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif; ">Autopsy&trade; is a digital forensics platform based on The Sleuth Kit&trade; and other tools. <br><ul><li>General Information: <a style\="color: \#1E2A60;" href\="http://www.sleuthkit.org">http://www.sleuthkit.org</a>.</li><li>Training: <a style\="color: \#1E2A60;" href\="https://www.autopsy.com/support/training/">https://www.autopsy.com/support/training/</a></li><li>Support: <a style\="color: \#1E2A60;" href\="https://www.sleuthkit.org/support.php">https://www.sleuthkit.org/support.php</a></li></ul>Copyright &copy; 2003-2020. </div>
SortChooser.dialogTitle=Choose Sort Criteria
ThumbnailViewChildren.progress.cancelling=(Cancelling)
# {0} - file name
@@ -97,7 +97,7 @@ DataContentViewerHex.goToPageTextField.text=
DataContentViewerHex.goToPageLabel.text=Go to Page:
DataResultViewerThumbnail.imagesLabel.text=Images:
DataResultViewerThumbnail.imagesRangeLabel.text=-
DataResultViewerThumbnail.filePathLabel.text=\
DataResultViewerThumbnail.filePathLabel.text=\ \ \
AdvancedConfigurationDialog.cancelButton.text=Cancel
DataArtifactContentViewer.waitText=Retrieving and preparing data, please wait...
DataArtifactContentViewer.errorText=Error retrieving result

View File

@@ -72,6 +72,8 @@ import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsDAO.CommAccountFetcher
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.EmailSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.EmailsDAO.EmailFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemDAO.FileSystemFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemDAO.FileSystemHostFetcher;
@@ -90,6 +92,9 @@ import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeExtFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeMimeFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeSizeFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.DeletedFileFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.DeletedContentSearchParams;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.SearchManager;
/**
@@ -1159,7 +1164,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
try {
this.searchResultManager = new SearchManager(new DataArtifactFetcher(dataArtifactParams), getPageSize());
SearchResultsDTO results = searchResultManager.getResults();
displaySearchResults(results, true);
displaySearchResults(results, true, dataArtifactParams.getNodeSelectionInfo());
} catch (ExecutionException ex) {
logger.log(Level.WARNING,
MessageFormat.format("There was an error displaying search results for [artifact type: {0}, data source id: {1}]",
@@ -1190,6 +1195,27 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
}
}
/**
* Displays results of querying the DAO for email messages matching the
* search parameters.
*
* @param searchParams The email message search parameters.
*/
void displayEmailMessages(EmailSearchParams searchParams) {
try {
this.searchResultManager = new SearchManager(new EmailFetcher(searchParams), getPageSize());
SearchResultsDTO results = searchResultManager.getResults();
displaySearchResults(results, true);
} catch (ExecutionException ex) {
logger.log(Level.WARNING,
MessageFormat.format("There was an error displaying search results for [data source id: {0}, account: {1}, folder: {2}]",
searchParams.getDataSourceId() == null ? "<null>" : searchParams.getDataSourceId(),
searchParams.getAccount() == null ? "<null>" : searchParams.getAccount(),
searchParams.getFolder() == null ? "<null>" : searchParams.getFolder()),
ex);
}
}
void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams) {
try {
this.searchResultManager = new SearchManager(new AnalysisResultFetcher(analysisResultParams), getPageSize());
@@ -1204,6 +1230,24 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
}
}
/**
* Displays deleted content in the file views section.
*
* @param deletedSearchParams The deleted content search parameters.
*/
void displayDeletedContent(DeletedContentSearchParams deletedSearchParams) {
try {
this.searchResultManager = new SearchManager(new DeletedFileFetcher(deletedSearchParams), getPageSize());
SearchResultsDTO results = searchResultManager.getResults();
displaySearchResults(results, true);
} catch (ExecutionException ex) {
logger.log(Level.WARNING,
MessageFormat.format("There was an error displaying search results for [filter: {0}, data source id: {1}]",
deletedSearchParams.getFilter() == null ? "<null>" : deletedSearchParams.getFilter(),
deletedSearchParams.getDataSourceId() == null ? "<null>" : deletedSearchParams.getDataSourceId()),
ex);
}
}
/**
* Displays results of querying the DAO for files matching the file
* extension search parameters query.
@@ -1336,7 +1380,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
try {
this.searchResultManager = new SearchManager(new FileSystemFetcher(fileSystemKey), getPageSize());
SearchResultsDTO results = searchResultManager.getResults();
displaySearchResults(results, true, fileSystemKey.getChildIdToSelect());
displaySearchResults(results, true, fileSystemKey.getNodeSelectionInfo());
} catch (ExecutionException | IllegalArgumentException ex) {
logger.log(Level.WARNING, MessageFormat.format(
"There was an error fetching data for file system filter: {0}.",
@@ -1398,13 +1442,13 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
"DataResultPanel_pageIdxOfCount={0} of {1}"
})
private void displaySearchResults(SearchResultsDTO searchResults, boolean resetPaging) {
private void displaySearchResults(SearchResultsDTO searchResults, boolean resetPaging) {
displaySearchResults(searchResults, resetPaging, null);
}
private void displaySearchResults(SearchResultsDTO searchResults, boolean resetPaging, Long contentIdToSelect) {
}
private void displaySearchResults(SearchResultsDTO searchResults, boolean resetPaging, ChildNodeSelectionInfo childSelectionInfo) {
if (!SwingUtilities.isEventDispatchThread()) {
SwingUtilities.invokeLater(() -> displaySearchResults(searchResults, resetPaging, contentIdToSelect));
SwingUtilities.invokeLater(() -> displaySearchResults(searchResults, resetPaging, childSelectionInfo));
return;
}
@@ -1412,7 +1456,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
setNode(null, resetPaging);
} else {
SearchResultRootNode node = new SearchResultRootNode(searchResults);
node.setChildIdToSelect(contentIdToSelect);
node.setNodeSelectionInfo(childSelectionInfo);
setNode(node, resetPaging);
setNumberOfChildNodes(
searchResults.getTotalResultsCount() > Integer.MAX_VALUE

View File

@@ -47,6 +47,8 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.EmailSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.DeletedContentSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
@@ -394,6 +396,24 @@ public final class DataResultTopComponent extends TopComponent implements DataRe
public void displayDataArtifact(DataArtifactSearchParam dataArtifactParams) {
dataResultPanel.displayDataArtifact(dataArtifactParams);
}
/**
* Displays deleted content in the file views section.
*
* @param deletedSearchParams The deleted content search parameters.
*/
public void displayDeletedContent(DeletedContentSearchParams deletedSearchParams) {
dataResultPanel.displayDeletedContent(deletedSearchParams);
}
/**
* Displays results of querying the DAO for email messages matching the
* search parameters.
*
* @param searchParams The email message search parameters.
*/
public void displayEmailMessages(EmailSearchParams searchParams) {
dataResultPanel.displayEmailMessages(searchParams);
}
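
A hypothetical caller sketch for the two display methods added above; the EmailSearchParams constructor arguments (data source id, account, folder) are an assumption inferred from the getters used in DataResultPanel's logging, not an API confirmed by this diff:

// Illustration only: drive the new delegation method from a selection handler.
void showInbox(DataResultTopComponent viewer, Long dataSourceId) {
    EmailSearchParams params =
            new EmailSearchParams(dataSourceId, "user@example.com", "INBOX"); // assumed signature
    viewer.displayEmailMessages(params);
}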
/**
* Displays results of querying the DAO for files matching the mime search

View File

@@ -72,7 +72,9 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.autopsy.mainui.nodes.NodeSelectionInfo.ContentNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.SearchResultRootNode;
import org.sleuthkit.autopsy.mainui.nodes.TreeNode;
import org.sleuthkit.datamodel.Score.Significance;
/**
@@ -387,8 +389,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
* If one of the child nodes of the root node is to be selected, select
* it.
*/
if (rootNode instanceof ContentNodeSelectionInfo) {
ContentNodeSelectionInfo selectedChildInfo = ((ContentNodeSelectionInfo) rootNode);
if (rootNode instanceof SearchResultRootNode) {
ChildNodeSelectionInfo selectedChildInfo = ((SearchResultRootNode)rootNode).getNodeSelectionInfo();
if (null != selectedChildInfo) {
Node[] childNodes = rootNode.getChildren().getNodes(true);
for (int i = 0; i < childNodes.length; ++i) {
@@ -406,7 +408,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
}
}
// Once it is selected, clear the id.
((ContentNodeSelectionInfo) rootNode).setChildIdToSelect(null);
((SearchResultRootNode) rootNode).setNodeSelectionInfo(null);
}
}

View File

@@ -33,18 +33,10 @@ public interface AutopsyItemVisitor<T> {
T visit(Views v);
T visit(FileTypesByExtension sf);
T visit(RecentFiles rf);
T visit(RecentFiles.RecentFilesFilter rff);
T visit(DeletedContent dc);
T visit(DeletedContent.DeletedContentFilter dcf);
T visit(FileSize fs);
T visit(KeywordHits kh);
T visit(HashsetHits hh);
@@ -58,10 +50,6 @@ public interface AutopsyItemVisitor<T> {
T visit(Reports reportsItem);
T visit(Accounts accountsItem);
T visit(FileTypes fileTypesItem);
T visit(FileTypesByMimeType aThis);
T visit(OsAccounts osAccountItem);
@@ -81,31 +69,6 @@ public interface AutopsyItemVisitor<T> {
static abstract public class Default<T> implements AutopsyItemVisitor<T> {
protected abstract T defaultVisit(AutopsyVisitableItem ec);
@Override
public T visit(FileTypesByExtension sf) {
return defaultVisit(sf);
}
@Override
public T visit(FileTypesByMimeType ftByMimeType) {
return defaultVisit(ftByMimeType);
}
@Override
public T visit(DeletedContent dc) {
return defaultVisit(dc);
}
@Override
public T visit(DeletedContent.DeletedContentFilter dcf) {
return defaultVisit(dcf);
}
@Override
public T visit(FileSize fs) {
return defaultVisit(fs);
}
@Override
public T visit(RecentFiles rf) {
@@ -167,11 +130,6 @@ public interface AutopsyItemVisitor<T> {
return defaultVisit(personGrouping);
}
@Override
public T visit(FileTypes ft) {
return defaultVisit(ft);
}
@Override
public T visit(Reports reportsItem) {
return defaultVisit(reportsItem);

View File

@@ -110,13 +110,6 @@ Category.two=CAT-2: Child Exploitation (Non-Illegal/Age Difficult)
Category.zero=CAT-0: Uncategorized
DataArtifacts_name=Data Artifacts
DataSourcesHostsNode_name=Data Sources
DeletedContent.allDelFilter.text=All
DeletedContent.createSheet.filterType.desc=no description
DeletedContent.createSheet.filterType.displayName=Type
DeletedContent.createSheet.name.desc=no description
DeletedContent.createSheet.name.displayName=Name
DeletedContent.deletedContentsNode.name=Deleted Files
DeletedContent.fsDelFilter.text=File System
DeleteReportAction.showConfirmDialog.errorMsg=An error occurred while deleting the reports.
DeleteReportAction.showConfirmDialog.multiple.explanation=The reports will remain on disk.
DeleteReportAction.showConfirmDialog.single.explanation=The report will remain on disk.
@@ -124,12 +117,6 @@ FileNode.getActions.openInExtViewer.text=Open in External Viewer Ctrl+E
FileNode.getActions.searchFilesSameMD5.text=Search for files with the same MD5 hash
FileNode.getActions.viewFileInDir.text=View File in Directory
FileNode.getActions.viewInNewWin.text=View Item in New Window
FileTypes.bgCounting.placeholder=\ (counting...)
FileTypes.createSheet.name.desc=no description
FileTypes.createSheet.name.displayName=Name
FileTypes.createSheet.name.name=Name
FileTypes.name.text=File Types
FileTypesByMimeType.name.text=By MIME Type
GetSCOTask.occurrences.defaultDescription=No correlation properties found
GetSCOTask.occurrences.multipleProperties=Multiple different correlation properties exist for this result
HostGroupingNode_unknownHostNode_title=Unknown Host

View File

@@ -1,519 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.logging.Level;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.CONTENT_CHANGED;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentVisitor;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.VirtualDirectory;
/**
* deleted content view nodes
*/
public class DeletedContent implements AutopsyVisitableItem {
private SleuthkitCase skCase;
private final long filteringDSObjId; // 0 if not filtering/grouping by data source
@NbBundle.Messages({"DeletedContent.fsDelFilter.text=File System",
"DeletedContent.allDelFilter.text=All"})
public enum DeletedContentFilter implements AutopsyVisitableItem {
FS_DELETED_FILTER(0, "FS_DELETED_FILTER", //NON-NLS
Bundle.DeletedContent_fsDelFilter_text()),
ALL_DELETED_FILTER(1, "ALL_DELETED_FILTER", //NON-NLS
Bundle.DeletedContent_allDelFilter_text());
private int id;
private String name;
private String displayName;
private DeletedContentFilter(int id, String name, String displayName) {
this.id = id;
this.name = name;
this.displayName = displayName;
}
public String getName() {
return this.name;
}
public int getId() {
return this.id;
}
public String getDisplayName() {
return this.displayName;
}
@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}
}
public DeletedContent(SleuthkitCase skCase) {
this(skCase, 0);
}
public DeletedContent(SleuthkitCase skCase, long dsObjId) {
this.skCase = skCase;
this.filteringDSObjId = dsObjId;
}
long filteringDataSourceObjId() {
return this.filteringDSObjId;
}
@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}
public SleuthkitCase getSleuthkitCase() {
return this.skCase;
}
public static class DeletedContentsNode extends DisplayableItemNode {
@NbBundle.Messages("DeletedContent.deletedContentsNode.name=Deleted Files")
private static final String NAME = Bundle.DeletedContent_deletedContentsNode_name();
DeletedContentsNode(SleuthkitCase skCase, long datasourceObjId) {
super(Children.create(new DeletedContentsChildren(skCase, datasourceObjId), true), Lookups.singleton(NAME));
super.setName(NAME);
super.setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
@NbBundle.Messages({
"DeletedContent.createSheet.name.displayName=Name",
"DeletedContent.createSheet.name.desc=no description"})
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>("Name", //NON-NLS
Bundle.DeletedContent_createSheet_name_displayName(),
Bundle.DeletedContent_createSheet_name_desc(),
NAME));
return sheet;
}
@Override
public String getItemType() {
return getClass().getName();
}
}
public static class DeletedContentsChildren extends ChildFactory<DeletedContent.DeletedContentFilter> {
private SleuthkitCase skCase;
private Observable notifier;
private final long datasourceObjId;
// true if we have already told the user that not all files will be shown
private static volatile boolean maxFilesDialogShown = false;
public DeletedContentsChildren(SleuthkitCase skCase, long dsObjId) {
this.skCase = skCase;
this.datasourceObjId = dsObjId;
this.notifier = new DeletedContentsChildrenObservable();
}
/**
* Listens for case and ingest events. Updates observers when events are
* fired. Other nodes are listening to this for changes.
*/
private static final class DeletedContentsChildrenObservable extends Observable {
private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(
Case.Events.DATA_SOURCE_ADDED,
Case.Events.CURRENT_CASE
);
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(CONTENT_CHANGED);
DeletedContentsChildrenObservable() {
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl);
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, pcl);
Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
}
private void removeListeners() {
deleteObservers();
IngestManager.getInstance().removeIngestJobEventListener(pcl);
IngestManager.getInstance().removeIngestModuleEventListener(pcl);
Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
}
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
/**
* @@@ COULD CHECK if the new file is deleted before
* notifying... Checking for a current case is a stop gap
* measure until a different way of handling the
* closing of cases is worked out. Currently, remote events
* may be received for a case that is already closed.
*/
try {
Case.getCurrentCaseThrows();
// new file was added
// @@@ COULD CHECK If the new file is deleted before notifying...
update();
} catch (NoCurrentCaseException notUsed) {
/**
* Case is closed, do nothing.
*/
}
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())
|| eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
/**
* Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked
* out. Currently, remote events may be received for a case
* that is already closed.
*/
try {
Case.getCurrentCaseThrows();
update();
} catch (NoCurrentCaseException notUsed) {
/**
* Case is closed, do nothing.
*/
}
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
// case was closed. Remove listeners so that we don't get called with a stale case handle
if (evt.getNewValue() == null) {
removeListeners();
}
maxFilesDialogShown = false;
}
};
private void update() {
setChanged();
notifyObservers();
}
}
@Override
protected boolean createKeys(List<DeletedContent.DeletedContentFilter> list) {
list.addAll(Arrays.asList(DeletedContent.DeletedContentFilter.values()));
return true;
}
@Override
protected Node createNodeForKey(DeletedContent.DeletedContentFilter key) {
return new DeletedContentNode(skCase, key, notifier, datasourceObjId);
}
public class DeletedContentNode extends DisplayableItemNode {
private final DeletedContent.DeletedContentFilter filter;
private final long datasourceObjId;
// Use version that has observer for updates
@Deprecated
DeletedContentNode(SleuthkitCase skCase, DeletedContent.DeletedContentFilter filter, long dsObjId) {
super(Children.create(new DeletedContentChildren(filter, skCase, null, dsObjId), true), Lookups.singleton(filter.getDisplayName()));
this.filter = filter;
this.datasourceObjId = dsObjId;
init();
}
DeletedContentNode(SleuthkitCase skCase, DeletedContent.DeletedContentFilter filter, Observable o, long dsObjId) {
super(Children.create(new DeletedContentChildren(filter, skCase, o, dsObjId), true), Lookups.singleton(filter.getDisplayName()));
this.filter = filter;
this.datasourceObjId = dsObjId;
init();
o.addObserver(new DeletedContentNodeObserver());
}
private void init() {
super.setName(filter.getName());
String tooltip = filter.getDisplayName();
this.setShortDescription(tooltip);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS
updateDisplayName();
}
// update the display name when new events are fired
private class DeletedContentNodeObserver implements Observer {
@Override
public void update(Observable o, Object arg) {
updateDisplayName();
}
}
private void updateDisplayName() {
//get count of children without preloading all children nodes
final long count = DeletedContentChildren.calculateItems(skCase, filter, datasourceObjId);
//final long count = getChildren().getNodesCount(true);
super.setDisplayName(filter.getDisplayName() + " (" + count + ")");
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
@NbBundle.Messages({
"DeletedContent.createSheet.filterType.displayName=Type",
"DeletedContent.createSheet.filterType.desc=no description"})
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>("Type", //NON_NLS
Bundle.DeletedContent_createSheet_filterType_displayName(),
Bundle.DeletedContent_createSheet_filterType_desc(),
filter.getDisplayName()));
return sheet;
}
@Override
public boolean isLeafTypeNode() {
return true;
}
@Override
public String getItemType() {
/**
* Return getClass().getName() + filter.getName() if custom
* settings are desired for different filters.
*/
return DisplayableItemNode.FILE_PARENT_NODE_KEY;
}
}
static class DeletedContentChildren extends BaseChildFactory<AbstractFile> {
private final SleuthkitCase skCase;
private final DeletedContent.DeletedContentFilter filter;
private static final Logger logger = Logger.getLogger(DeletedContentChildren.class.getName());
private final Observable notifier;
private final long datasourceObjId;
DeletedContentChildren(DeletedContent.DeletedContentFilter filter, SleuthkitCase skCase, Observable o, long datasourceObjId) {
super(filter.getName(), new ViewsKnownAndSlackFilter<>());
this.skCase = skCase;
this.filter = filter;
this.notifier = o;
this.datasourceObjId = datasourceObjId;
}
private final Observer observer = new DeletedContentChildrenObserver();
@Override
protected List<AbstractFile> makeKeys() {
return runFsQuery();
}
// Cause refresh of children if there are changes
private class DeletedContentChildrenObserver implements Observer {
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
@Override
protected void onAdd() {
if (notifier != null) {
notifier.addObserver(observer);
}
}
@Override
protected void onRemove() {
if (notifier != null) {
notifier.deleteObserver(observer);
}
}
static private String makeQuery(DeletedContent.DeletedContentFilter filter, long filteringDSObjId) {
String query = "";
switch (filter) {
case FS_DELETED_FILTER:
query = "dir_flags = " + TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue() //NON-NLS
+ " AND meta_flags != " + TskData.TSK_FS_META_FLAG_ENUM.ORPHAN.getValue() //NON-NLS
+ " AND type = " + TskData.TSK_DB_FILES_TYPE_ENUM.FS.getFileType(); //NON-NLS
break;
case ALL_DELETED_FILTER:
query = " ( "
+ "( "
+ "(dir_flags = " + TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue() //NON-NLS
+ " OR " //NON-NLS
+ "meta_flags = " + TskData.TSK_FS_META_FLAG_ENUM.ORPHAN.getValue() //NON-NLS
+ ")"
+ " AND type = " + TskData.TSK_DB_FILES_TYPE_ENUM.FS.getFileType() //NON-NLS
+ " )"
+ " OR type = " + TskData.TSK_DB_FILES_TYPE_ENUM.CARVED.getFileType() //NON-NLS
+ " OR (dir_flags = " + TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue()
+ " AND type = " + TskData.TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE.getFileType() + " )"
+ " )";
//+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType()
//+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS.getFileType()
//+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS.getFileType()
//+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED.getFileType()
//+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.getFileType()
//+ " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType();
break;
default:
logger.log(Level.SEVERE, "Unsupported filter type to get deleted content: {0}", filter); //NON-NLS
}
if (filteringDSObjId > 0) {
query += " AND data_source_obj_id = " + filteringDSObjId;
}
return query;
}
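
For reference, a sketch of the WHERE clause makeQuery() assembles for FS_DELETED_FILTER, assuming TskData's usual numeric values (TSK_FS_NAME_FLAG_ENUM.UNALLOC = 2, TSK_FS_META_FLAG_ENUM.ORPHAN = 32, TSK_DB_FILES_TYPE_ENUM.FS = 0) and a data source object id of 5; verify the constants against your TskData version:

// Illustration only, not part of the diff.
static List<AbstractFile> fsDeletedExample(SleuthkitCase skCase) throws TskCoreException {
    String where = "dir_flags = 2 AND meta_flags != 32 AND type = 0"
            + " AND data_source_obj_id = 5";
    return skCase.findAllFilesWhere(where); // the same call runFsQuery() makes below
}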
private List<AbstractFile> runFsQuery() {
List<AbstractFile> ret = new ArrayList<>();
String query = makeQuery(filter, datasourceObjId);
try {
ret = skCase.findAllFilesWhere(query);
} catch (TskCoreException e) {
logger.log(Level.SEVERE, "Error getting files for the deleted content view using: " + query, e); //NON-NLS
}
return ret;
}
/**
* Get the children count without actually loading all the nodes.
*
* @param sleuthkitCase   The case database.
* @param filter          The deleted content filter to count files for.
* @param datasourceObjId The data source object id, or 0 if not filtering
*                        by data source.
*
* @return The number of matching files, or 0 if the count query fails.
*/
static long calculateItems(SleuthkitCase sleuthkitCase, DeletedContent.DeletedContentFilter filter, long datasourceObjId) {
try {
return sleuthkitCase.countFilesWhere(makeQuery(filter, datasourceObjId));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting deleted files search view count", ex); //NON-NLS
return 0;
}
}
@Override
protected Node createNodeForKey(AbstractFile key) {
return key.accept(new ContentVisitor.Default<AbstractNode>() {
public FileNode visit(AbstractFile f) {
return new FileNode(f, false);
}
public FileNode visit(FsContent f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(LayoutFile f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(File f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(Directory f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(VirtualDirectory f) {
return new FileNode(f, false);
}
@Override
protected AbstractNode defaultVisit(Content di) {
throw new UnsupportedOperationException("Not supported for this type of Displayable Item: " + di.toString());
}
});
}
}
}
}

View File

@@ -25,13 +25,8 @@ import org.sleuthkit.autopsy.commonpropertiessearch.InstanceCaseNode;
import org.sleuthkit.autopsy.commonpropertiessearch.CommonAttributeValueNode;
import org.sleuthkit.autopsy.commonpropertiessearch.CaseDBCommonAttributeInstanceNode;
import org.sleuthkit.autopsy.commonpropertiessearch.InstanceDataSourceNode;
import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsChildren.DeletedContentNode;
import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsNode;
import org.sleuthkit.autopsy.datamodel.FileSize.FileSizeRootNode;
import org.sleuthkit.autopsy.datamodel.FileTypes.FileTypesNode;
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.autopsy.allcasessearch.CorrelationAttributeInstanceNode;
import org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode;
/**
* Visitor pattern that goes over all nodes in the directory tree. This includes
@@ -72,14 +67,6 @@ public interface DisplayableItemNodeVisitor<T> {
T visit(DataSourceGroupingNode dataSourceGroupingNode);
T visit(DeletedContentNode dcn);
T visit(DeletedContentsNode dcn);
T visit(FileSizeRootNode fsrn);
T visit(FileTypesByExtNode sfn);
T visit(RecentFilesNode rfn);
T visit(RecentFilesFilterNode rffn);
@@ -164,10 +151,6 @@ public interface DisplayableItemNodeVisitor<T> {
T visit(Accounts.DefaultAccountTypeNode node);
T visit(FileTypes.FileTypesNode fileTypes);
T visit(FileTypesByMimeType.ByMimeTypeNode ftByMimeTypeNode);
T visit(EmptyNode.MessageNode emptyNode);
/*
@@ -292,36 +275,11 @@ public interface DisplayableItemNodeVisitor<T> {
return defaultVisit(ecn);
}
@Override
public T visit(FileTypesByMimeType.ByMimeTypeNode ftByMimeTypeNode) {
return defaultVisit(ftByMimeTypeNode);
}
@Override
public T visit(EmptyNode.MessageNode ftByMimeTypeEmptyNode) {
return defaultVisit(ftByMimeTypeEmptyNode);
}
@Override
public T visit(DeletedContentNode dcn) {
return defaultVisit(dcn);
}
@Override
public T visit(DeletedContentsNode dcn) {
return defaultVisit(dcn);
}
@Override
public T visit(FileSizeRootNode fsrn) {
return defaultVisit(fsrn);
}
@Override
public T visit(org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode sfn) {
return defaultVisit(sfn);
}
@Override
public T visit(RecentFilesNode rfn) {
return defaultVisit(rfn);
@@ -362,11 +320,6 @@ public interface DisplayableItemNodeVisitor<T> {
return defaultVisit(dataSourceGroupingNode);
}
@Override
public T visit(FileTypesNode ft) {
return defaultVisit(ft);
}
@Override
public T visit(DataSourceFilesNode in) {
return defaultVisit(in);

View File

@@ -1,103 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory.FileSizeTypeFactory;
/**
* Files by Size View node and related child nodes
*/
public class FileSize implements AutopsyVisitableItem {
private static final Logger logger = Logger.getLogger(FileSize.class.getName());
private final long filteringDSObjId; // 0 if not filtering/grouping by data source
public FileSize(long dsObjId) {
this.filteringDSObjId = dsObjId;
}
@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}
long filteringDataSourceObjId() {
return this.filteringDSObjId;
}
/*
* Root node. Children are nodes for specific sizes.
*/
public static class FileSizeRootNode extends DisplayableItemNode {
private static final String NAME = NbBundle.getMessage(FileSize.class, "FileSize.fileSizeRootNode.name");
private final long dataSourceObjId;
FileSizeRootNode(long datasourceObjId) {
super(Children.create(new FileSizeTypeFactory(datasourceObjId > 0 ? datasourceObjId : null), true), Lookups.singleton(NAME));
super.setName(NAME);
super.setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-size-16.png"); //NON-NLS
this.dataSourceObjId = datasourceObjId;
}
public Node clone() {
return new FileSizeRootNode(this.dataSourceObjId);
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileSize.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "FileSize.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "FileSize.createSheet.name.desc"),
NAME));
return sheet;
}
@Override
public String getItemType() {
return getClass().getName();
}
}
}

View File

@@ -1,500 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.swing.SwingWorker;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.AnalysisResultAdded;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentVisitor;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.LocalFile;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SlackFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitItemVisitor;
import org.sleuthkit.datamodel.TskCoreException;
/**
* File Types node support
*/
public final class FileTypes implements AutopsyVisitableItem {
private static final Logger logger = Logger.getLogger(FileTypes.class.getName());
@NbBundle.Messages("FileTypes.name.text=File Types")
private static final String NAME = Bundle.FileTypes_name_text();
/**
* Threshold used to limit db queries for child node counts. When the
* tsk_files table has more than this number of rows, we don't query for the
* child node counts, and since we don't have an accurate number we don't
* show the counts.
*/
private static final int NODE_COUNT_FILE_TABLE_THRESHOLD = 1_000_000;
/**
* Used to keep track of whether we have hit
* NODE_COUNT_FILE_TABLE_THRESHOLD. If we have, we stop querying for the
* number of rows in tsk_files, since it is already too large.
*/
private boolean showCounts = true;
private final long datasourceObjId;
FileTypes(long dsObjId) {
this.datasourceObjId = dsObjId;
updateShowCounts();
}
@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}
long filteringDataSourceObjId() {
return this.datasourceObjId;
}
/**
* Check the db to determine if the nodes should show child counts.
*/
void updateShowCounts() {
/*
* once we have passed the threshold, we don't need to keep checking the
* number of rows in tsk_files
*/
if (showCounts) {
try {
if (Case.getCurrentCaseThrows().getSleuthkitCase().countFilesWhere("1=1") > NODE_COUNT_FILE_TABLE_THRESHOLD) { //NON-NLS
showCounts = false;
}
} catch (NoCurrentCaseException | TskCoreException tskCoreException) {
showCounts = false;
logger.log(Level.SEVERE, "Error counting files.", tskCoreException); //NON-NLS
}
}
}
/**
* Node which will contain By Mime Type and By Extension nodes.
*/
public final class FileTypesNode extends DisplayableItemNode {
FileTypesNode() {
super(new RootContentChildren(Arrays.asList(
new FileTypesByExtension(FileTypes.this),
new FileTypesByMimeType(FileTypes.this))),
Lookups.singleton(NAME));
this.setName(NAME);
this.setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png"); //NON-NLS
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
@NbBundle.Messages({
"FileTypes.createSheet.name.name=Name",
"FileTypes.createSheet.name.displayName=Name",
"FileTypes.createSheet.name.desc=no description"})
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>(Bundle.FileTypes_createSheet_name_name(),
Bundle.FileTypes_createSheet_name_displayName(),
Bundle.FileTypes_createSheet_name_desc(),
NAME
));
return sheet;
}
@Override
public String getItemType() {
return getClass().getName();
}
}
static class FileNodeCreationVisitor extends ContentVisitor.Default<AbstractNode> {
FileNodeCreationVisitor() {
}
@Override
public FileNode visit(File f) {
return new FileNode(f, false);
}
@Override
public DirectoryNode visit(Directory d) {
return new DirectoryNode(d);
}
@Override
public LayoutFileNode visit(LayoutFile lf) {
return new LayoutFileNode(lf);
}
@Override
public LocalFileNode visit(DerivedFile df) {
return new LocalFileNode(df);
}
@Override
public LocalFileNode visit(LocalFile lf) {
return new LocalFileNode(lf);
}
@Override
public SlackFileNode visit(SlackFile sf) {
return new SlackFileNode(sf, false);
}
@Override
protected AbstractNode defaultVisit(Content di) {
throw new UnsupportedOperationException(NbBundle.getMessage(this.getClass(), "FileTypeChildren.exception.notSupported.msg", di.toString()));
}
}
static abstract class BGCountUpdatingNode extends DisplayableItemNode implements Observer {
private long childCount = -1;
private FileTypes typesRoot;
BGCountUpdatingNode(FileTypes typesRoot, Children children) {
this(typesRoot, children, null);
}
BGCountUpdatingNode(FileTypes typesRoot, Children children, Lookup lookup) {
super(children, lookup);
this.typesRoot = typesRoot;
}
@Override
public void update(Observable o, Object arg) {
updateDisplayName();
}
abstract String getDisplayNameBase();
/**
* Calculate the number of children of this node, possibly by querying
* the DB.
*
* @return The number of children of this node.
*
* @throws TskCoreException If there was an error querying the DB to
*                          calculate the number of children.
*/
abstract long calculateChildCount() throws TskCoreException;
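
A minimal hypothetical override of calculateChildCount(), counting rows with a query instead of materializing child nodes; the skCase field and the MIME-type filter are illustrative assumptions:

@Override
long calculateChildCount() throws TskCoreException {
    // skCase is assumed to be a SleuthkitCase field on the concrete subclass.
    return skCase.countFilesWhere("mime_type = 'image/png'"); // hypothetical filter
}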
/**
* Updates the display name of the mediaSubTypeNode to include the count
* of files which it represents.
*/
@NbBundle.Messages("FileTypes.bgCounting.placeholder= (counting...)")
void updateDisplayName() {
if (typesRoot.showCounts) {
//only show "(counting...)" the first time, otherwise it is distracting.
setDisplayName(getDisplayNameBase() + ((childCount < 0) ? Bundle.FileTypes_bgCounting_placeholder()
: (" (" + childCount + ")"))); //NON-NLS
new SwingWorker<Long, Void>() {
@Override
protected Long doInBackground() throws Exception {
return calculateChildCount();
}
@Override
protected void done() {
try {
childCount = get();
setDisplayName(getDisplayNameBase() + " (" + childCount + ")"); //NON-NLS
} catch (InterruptedException | ExecutionException ex) {
setDisplayName(getDisplayNameBase());
logger.log(Level.WARNING, "Failed to get count of files for " + getDisplayNameBase(), ex); //NON-NLS
}
}
}.execute();
} else {
setDisplayName(getDisplayNameBase() + ((childCount < 0) ? "" : (" (" + childCount + "+)"))); //NON-NLS
}
}
}
/**
* Class that is used as a key by NetBeans for creating result nodes. This
* is a wrapper around a Content object and is being put in place as an
* optimization to avoid the Content.hashCode() implementation which issues
* a database query to get the number of children when determining whether 2
* Content objects represent the same thing. TODO: This is a temporary
* solution that can hopefully be removed once we address the issue of
* determining how many children a Content has (JIRA-2823).
*/
static class FileTypesKey implements Content {
private final Content content;
public FileTypesKey(Content content) {
this.content = content;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileTypesKey other = (FileTypesKey) obj;
return this.content.getId() == other.content.getId();
}
@Override
public int hashCode() {
int hash = 7;
hash = 101 * hash + (int)(this.content.getId() ^ (this.content.getId() >>> 32));
return hash;
}
@Override
public <T> T accept(SleuthkitItemVisitor<T> v) {
return content.accept(v);
}
@Override
public int read(byte[] buf, long offset, long len) throws TskCoreException {
return content.read(buf, offset, len);
}
@Override
public void close() {
content.close();
}
@Override
public long getSize() {
return content.getSize();
}
@Override
public <T> T accept(ContentVisitor<T> v) {
return content.accept(v);
}
@Override
public String getName() {
return content.getName();
}
@Override
public String getUniquePath() throws TskCoreException {
return content.getUniquePath();
}
@Override
public long getId() {
return content.getId();
}
@Override
public Content getDataSource() throws TskCoreException {
return content.getDataSource();
}
@Override
public List<Content> getChildren() throws TskCoreException {
return content.getChildren();
}
@Override
public boolean hasChildren() throws TskCoreException {
return content.hasChildren();
}
@Override
public int getChildrenCount() throws TskCoreException {
return content.getChildrenCount();
}
@Override
public Content getParent() throws TskCoreException {
return content.getParent();
}
@Override
public List<Long> getChildrenIds() throws TskCoreException {
return content.getChildrenIds();
}
@Deprecated
@SuppressWarnings("deprecation")
@Override
public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
return content.newArtifact(artifactTypeID);
}
@Deprecated
@SuppressWarnings("deprecation")
@Override
public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
return content.newArtifact(type);
}
@Override
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
return content.newDataArtifact(artifactType, attributesList, osAccountId);
}
@Override
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId, long dataSourceId) throws TskCoreException {
return content.newDataArtifact(artifactType, attributesList, osAccountId, dataSourceId);
}
@Override
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
return content.newDataArtifact(artifactType, attributesList);
}
@Override
public ArrayList<BlackboardArtifact> getArtifacts(String artifactTypeName) throws TskCoreException {
return content.getArtifacts(artifactTypeName);
}
@Override
public BlackboardArtifact getGenInfoArtifact() throws TskCoreException {
return content.getGenInfoArtifact();
}
@Override
public BlackboardArtifact getGenInfoArtifact(boolean create) throws TskCoreException {
return content.getGenInfoArtifact(create);
}
@Override
public ArrayList<BlackboardAttribute> getGenInfoAttributes(BlackboardAttribute.ATTRIBUTE_TYPE attr_type) throws TskCoreException {
return content.getGenInfoAttributes(attr_type);
}
@Override
public ArrayList<BlackboardArtifact> getArtifacts(int artifactTypeID) throws TskCoreException {
return content.getArtifacts(artifactTypeID);
}
@Override
public ArrayList<BlackboardArtifact> getArtifacts(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
return content.getArtifacts(type);
}
@Override
public ArrayList<BlackboardArtifact> getAllArtifacts() throws TskCoreException {
return content.getAllArtifacts();
}
@Override
public Set<String> getHashSetNames() throws TskCoreException {
return content.getHashSetNames();
}
@Override
public long getArtifactsCount(String artifactTypeName) throws TskCoreException {
return content.getArtifactsCount(artifactTypeName);
}
@Override
public long getArtifactsCount(int artifactTypeID) throws TskCoreException {
return content.getArtifactsCount(artifactTypeID);
}
@Override
public long getArtifactsCount(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
return content.getArtifactsCount(type);
}
@Override
public long getAllArtifactsCount() throws TskCoreException {
return content.getAllArtifactsCount();
}
@Override
public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type type, Score score, String string, String string1, String string2, Collection<BlackboardAttribute> clctn) throws TskCoreException {
return content.newAnalysisResult(type, score, string, string1, string2, clctn);
}
@Override
public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type type, Score score, String string, String string1, String string2, Collection<BlackboardAttribute> clctn, long dataSourceId) throws TskCoreException {
return content.newAnalysisResult(type, score, string, string1, string2, clctn, dataSourceId);
}
@Override
public Score getAggregateScore() throws TskCoreException {
return content.getAggregateScore();
}
@Override
public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type type) throws TskCoreException {
return content.getAnalysisResults(type);
}
@Override
public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
return content.getAllAnalysisResults();
}
@Override
public List<DataArtifact> getAllDataArtifacts() throws TskCoreException {
return content.getAllDataArtifacts();
}
}
}

View File

@@ -1,93 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory;
/**
* Filters database results by file extension.
*/
public final class FileTypesByExtension implements AutopsyVisitableItem {
private final FileTypes typesRoot;
public FileTypesByExtension(FileTypes typesRoot) {
this.typesRoot = typesRoot;
}
@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}
long filteringDataSourceObjId() {
return this.typesRoot.filteringDataSourceObjId();
}
public static class FileTypesByExtNode extends DisplayableItemNode {
private static final String FNAME = NbBundle.getMessage(FileTypesByExtNode.class, "FileTypesByExtNode.fname.text");
private final long dataSourceId;
FileTypesByExtNode(long dataSourceId) {
super(Children.create(new ViewsTypeFactory.FileExtFactory(dataSourceId > 0 ? dataSourceId : null), true), Lookups.singleton(FNAME));
this.dataSourceId = dataSourceId;
super.setName(FNAME);
super.setDisplayName(FNAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png"); //NON-NLS
} //NON-NLS
public Node clone() {
return new FileTypesByExtNode(dataSourceId);
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.name"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.desc"), getDisplayName()));
return sheet;
}
@Override
public String getItemType() {
return getClass().getName();
}
}
}

View File

@@ -1,107 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.util.Observable;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory.FileMimePrefixFactory;
/**
* Class which contains the Nodes for the 'By Mime Type' view located in the
* File Types view; it shows all files with a MIME type. Will initially be
* empty until file type identification has been performed. Contains a
* PropertyChangeListener which checks for IngestJobEvent Completed or
* Cancelled and IngestModuleEvent Content Changed.
*/
public final class FileTypesByMimeType extends Observable implements AutopsyVisitableItem {
private final static Logger logger = Logger.getLogger(FileTypesByMimeType.class.getName());
/**
* Root of the File Types tree. Used to provide single answer to question:
* Should the child counts be shown next to the nodes?
*/
private final FileTypes typesRoot;
FileTypesByMimeType(FileTypes typesRoot) {
this.typesRoot = typesRoot;
}
@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}
long filteringDataSourceObjId() {
return typesRoot.filteringDataSourceObjId();
}
/**
* Class which represents the root node of the "By MIME Type" tree, will
* have children of each media type present in the database or no children
* when the file detection module has not been run and MIME type is
* currently unknown.
*/
public static class ByMimeTypeNode extends DisplayableItemNode {
@NbBundle.Messages({"FileTypesByMimeType.name.text=By MIME Type"})
final String NAME = Bundle.FileTypesByMimeType_name_text();
private final long dataSourceId;
ByMimeTypeNode(long dataSourceId) {
super(Children.create(new FileMimePrefixFactory(
dataSourceId > 0
? dataSourceId
: null), true), Lookups.singleton(Bundle.FileTypesByMimeType_name_text()));
super.setName(NAME);
super.setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png");
this.dataSourceId = dataSourceId;
}
public Node clone() {
return new ByMimeTypeNode(dataSourceId);
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
public String getItemType() {
return getClass().getName();
}
boolean isEmpty() {
return this.getChildren().getNodesCount(true) <= 0;
}
}
}

View File

@@ -24,8 +24,6 @@ import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode;
import org.sleuthkit.autopsy.datamodel.FileTypesByMimeType.ByMimeTypeNode;
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.datamodel.SleuthkitVisitableItem;
@ -85,26 +83,11 @@ public class RootContentChildren extends Children.Keys<Object> {
*/
static class CreateAutopsyNodeVisitor extends AutopsyItemVisitor.Default<AbstractNode> {
@Override
public AbstractNode visit(FileTypesByExtension sf) {
return new FileTypesByExtNode(sf.filteringDataSourceObjId());
}
@Override
public AbstractNode visit(RecentFiles rf) {
return new RecentFilesNode(rf.getSleuthkitCase());
}
@Override
public AbstractNode visit(DeletedContent dc) {
return new DeletedContent.DeletedContentsNode(dc.getSleuthkitCase(), dc.filteringDataSourceObjId());
}
@Override
public AbstractNode visit(FileSize dc) {
return new FileSize.FileSizeRootNode(dc.filteringDataSourceObjId());
}
@Override
public AbstractNode visit(KeywordHits kh) {
return kh.new RootNode();
@ -142,12 +125,7 @@ public class RootContentChildren extends Children.Keys<Object> {
@Override
public AbstractNode visit(Views v) {
return new ViewsNode(v.getSleuthkitCase(), v.filteringDataSourceObjId());
}
@Override
public AbstractNode visit(FileTypes ft) {
return ft.new FileTypesNode();
return new ViewsNode(v.filteringDataSourceObjId());
}
@Override
@ -172,11 +150,6 @@ public class RootContentChildren extends Children.Keys<Object> {
"AbstractContentChildren.createAutopsyNodeVisitor.exception.noNodeMsg"));
}
@Override
public AbstractNode visit(FileTypesByMimeType ftByMimeTypeItem) {
return new ByMimeTypeNode(ftByMimeTypeItem.filteringDataSourceObjId());
}
@Override
public AbstractNode visit(PersonGrouping personGrouping) {
return new PersonNode(personGrouping.getPerson());
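
The visit overloads deleted above do not leave those item types unhandled: AutopsyItemVisitor.Default routes every overload it declares to a single defaultVisit, so the removed node types now fall through to the visitor's fallback (the noNodeMsg branch visible in the hunk). A self-contained sketch of the pattern with stand-in types:

// Stand-in types; the real interfaces are AutopsyItemVisitor and its Default base class.
interface ItemVisitorSketch<T> {

    T visit(FileTypesItemSketch item);

    T visit(ViewsItemSketch item);

    abstract class Default<T> implements ItemVisitorSketch<T> {

        // Every overload funnels here unless a subclass overrides it.
        protected abstract T defaultVisit(Object item);

        @Override
        public T visit(FileTypesItemSketch item) {
            return defaultVisit(item);
        }

        @Override
        public T visit(ViewsItemSketch item) {
            return defaultVisit(item);
        }
    }
}

class FileTypesItemSketch {
}

class ViewsItemSketch {
}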

View File

@ -18,11 +18,11 @@
*/
package org.sleuthkit.autopsy.datamodel;
import java.util.Arrays;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory.ViewsChildren;
/**
*
@ -32,30 +32,25 @@ import org.sleuthkit.datamodel.SleuthkitCase;
public class ViewsNode extends DisplayableItemNode {
public static final String NAME = NbBundle.getMessage(ViewsNode.class, "ViewsNode.name.text");
private final long dsObjId;
public ViewsNode(SleuthkitCase sleuthkitCase) {
this(sleuthkitCase, 0);
public ViewsNode() {
this(0);
}
public ViewsNode(SleuthkitCase sleuthkitCase, long dsObjId) {
super(
new RootContentChildren(Arrays.asList(
new FileTypes(dsObjId),
// June '15: Recent Files was removed because it was not useful w/out filtering
// add it back in if we can filter the results to a more manageable size.
// new RecentFiles(sleuthkitCase),
new DeletedContent(sleuthkitCase, dsObjId),
new FileSize(dsObjId))
),
Lookups.singleton(NAME)
);
public ViewsNode(long dsObjId) {
super(new ViewsChildren(dsObjId > 0 ? dsObjId : null), Lookups.singleton(NAME));
setName(NAME);
setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/views.png"); //NON-NLS
this.dsObjId = dsObjId;
}
public Node clone() {
return new ViewsNode(dsObjId);
}
@Override
public boolean isLeafTypeNode() {
return false;
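
The new constructor converts the legacy sentinel value 0 ("no data source filter") into the null that ViewsChildren expects via dsObjId > 0 ? dsObjId : null. The same conversion isolated as a helper, with an Optional variant as an alternative sketch:

import java.util.Optional;

final class DataSourceIdSketch {

    // 0 is the historical "no data source filter" sentinel; the new factories take null instead.
    static Long toNullableId(long dsObjId) {
        return dsObjId > 0 ? dsObjId : null;
    }

    // Optional-based alternative for callers that prefer to avoid nullable boxed longs.
    static Optional<Long> toOptionalId(long dsObjId) {
        return dsObjId > 0 ? Optional.of(dsObjId) : Optional.empty();
    }
}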

View File

@ -41,7 +41,6 @@ import org.sleuthkit.autopsy.datamodel.DirectoryNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
import org.sleuthkit.autopsy.datamodel.FileNode;
import org.sleuthkit.autopsy.datamodel.FileTypes.FileTypesNode;
import org.sleuthkit.autopsy.commonpropertiessearch.InstanceCountNode;
import org.sleuthkit.autopsy.commonpropertiessearch.InstanceCaseNode;
import org.sleuthkit.autopsy.commonpropertiessearch.InstanceDataSourceNode;
@ -253,11 +252,6 @@ public class DataResultFilterNode extends FilterNode {
return null;
}
@Override
public List<Action> visit(FileTypesNode fileTypes) {
return defaultVisit(fileTypes);
}
@Override
protected List<Action> defaultVisit(DisplayableItemNode ditem) {
return Arrays.asList(ditem.getActions(true));
@ -361,11 +355,6 @@ public class DataResultFilterNode extends FilterNode {
return openChild(c);
}
@Override
public AbstractAction visit(FileTypesNode fileTypes) {
return openChild(fileTypes);
}
/**
* Tell the originating ExplorerManager to display the given
* dataModelNode.

View File

@ -33,14 +33,11 @@ import org.sleuthkit.autopsy.datamodel.DataArtifacts;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
import org.sleuthkit.autopsy.datamodel.FileNode;
import org.sleuthkit.autopsy.datamodel.FileSize;
import org.sleuthkit.autopsy.datamodel.FileTypes.FileTypesNode;
import org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode;
import org.sleuthkit.autopsy.datamodel.FileTypesByMimeType;
import org.sleuthkit.autopsy.datamodel.LayoutFileNode;
import org.sleuthkit.autopsy.datamodel.LocalFileNode;
import org.sleuthkit.autopsy.datamodel.LocalDirectoryNode;
import org.sleuthkit.autopsy.datamodel.SlackFileNode;
import org.sleuthkit.autopsy.datamodel.ViewsNode;
import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode;
import org.sleuthkit.autopsy.datamodel.VolumeNode;
import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory;
@ -95,14 +92,8 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
} else if (origNode instanceof AnalysisResults.RootNode) {
Node cloned = ((AnalysisResults.RootNode) origNode).clone();
return new Node[]{cloned};
} else if (origNode instanceof FileTypesByExtNode) {
Node cloned = ((FileTypesByExtNode) origNode).clone();
return new Node[]{cloned};
} else if (origNode instanceof FileTypesByMimeType.ByMimeTypeNode) {
Node cloned = ((FileTypesByMimeType.ByMimeTypeNode) origNode).clone();
return new Node[]{cloned};
} else if (origNode instanceof FileSize.FileSizeRootNode) {
Node cloned = ((FileSize.FileSizeRootNode) origNode).clone();
} else if (origNode instanceof ViewsNode) {
Node cloned = ((ViewsNode) origNode).clone();
return new Node[]{cloned};
} else if (origNode instanceof FileSystemFactory.FileSystemTreeNode) {
Node cloned = ((FileSystemFactory.FileSystemTreeNode) origNode).clone();
@ -269,11 +260,6 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
return visitDeep(ldn);
}
@Override
public Boolean visit(FileTypesNode ft) {
return defaultVisit(ft);
}
@Override
public Boolean visit(BlackboardArtifactNode bbafn) {
// Only show Message artifacts with children
@ -332,11 +318,6 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
return true;
}
@Override
public Boolean visit(FileTypesNode fileTypes) {
return defaultVisit(fileTypes);
}
@Override
public Boolean visit(BlackboardArtifactNode bbafn) {

View File

@ -81,7 +81,6 @@ import org.sleuthkit.autopsy.datamodel.CreditCards;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.EmailExtracted;
import org.sleuthkit.autopsy.datamodel.EmptyNode;
import org.sleuthkit.autopsy.datamodel.FileTypesByMimeType;
import org.sleuthkit.autopsy.datamodel.KeywordHits;
import org.sleuthkit.autopsy.datamodel.AutopsyTreeChildFactory;
import org.sleuthkit.autopsy.datamodel.DataArtifacts;
@ -92,6 +91,9 @@ import org.sleuthkit.autopsy.datamodel.ViewsNode;
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.autopsy.datamodel.accounts.BINRange;
import org.sleuthkit.autopsy.corecomponents.SelectionResponder;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo.BlackboardArtifactNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.TreeNode;
import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory.MimeParentNode;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
@ -880,7 +882,7 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
Node drfn = new DataResultFilterNode(originNode, DirectoryTreeTopComponent.this.em);
if (originNode instanceof SelectionResponder) {
((SelectionResponder) originNode).respondSelection(dataResult);
} else if (originNode instanceof FileTypesByMimeType.ByMimeTypeNode &&
} else if (originNode instanceof MimeParentNode &&
originNode.getChildren().getNodesCount(true) <= 0) {
//Special case for when File Type Identification has not yet been run and
//there are no mime types to populate Files by Mime Type Tree
@ -1394,8 +1396,13 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
return;
}
DisplayableItemNode undecoratedParentNode = (DisplayableItemNode) ((DirectoryTreeFilterNode) treeNode).getOriginal();
undecoratedParentNode.setChildNodeSelectionInfo(new ArtifactNodeSelectionInfo(art));
// DisplayableItemNode undecoratedParentNode = (DisplayableItemNode) ((DirectoryTreeFilterNode) treeNode).getOriginal();
// undecoratedParentNode.setChildNodeSelectionInfo(new ArtifactNodeSelectionInfo(art));
if(treeNode instanceof TreeNode) {
((TreeNode)treeNode).setNodeSelectionInfo(new BlackboardArtifactNodeSelectionInfo(art));
}
getTree().expandNode(treeNode);
if (this.getSelectedNode().equals(treeNode)) {
this.setDirectoryListingActive();

View File

@ -50,7 +50,8 @@ import org.sleuthkit.autopsy.datamodel.DataSourcesNode;
import org.sleuthkit.autopsy.datamodel.DataSourceFilesNode;
import org.sleuthkit.autopsy.datamodel.PersonNode;
import org.sleuthkit.autopsy.datamodel.RootContentChildren;
import org.sleuthkit.autopsy.mainui.nodes.NodeSelectionInfo.ContentNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo.ContentNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.TreeNode;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
@ -345,8 +346,8 @@ public class ViewContextAction extends AbstractAction {
}
}
if(parentTreeViewNode instanceof ContentNodeSelectionInfo) {
((ContentNodeSelectionInfo) parentTreeViewNode).setChildIdToSelect(childIdToSelect);
if(parentTreeViewNode instanceof TreeNode) {
((TreeNode) parentTreeViewNode).setNodeSelectionInfo(new ContentNodeSelectionInfo(childIdToSelect));
}
TreeView treeView = treeViewTopComponent.getTree();

View File

@ -84,6 +84,7 @@ IngestProgressSnapshotPanel.SnapshotsTableModel.colNames.jobID=Job ID
IngestJobTableModel.colName.jobID=Job ID
IngestJobTableModel.colName.dataSource=Data Source
IngestJobTableModel.colName.start=Start
IngestJobTableModel.colName.tier=Tier
IngestJobTableModel.colName.numProcessed=Files Processed
IngestJobTableModel.colName.filesPerSec=Files/Sec
IngestJobTableModel.colName.inProgress=In Progress

View File

@ -2,6 +2,10 @@ CTL_RunIngestAction=Run Ingest
FileIngestPipeline_SaveResults_Activity=Saving Results
# {0} - data source name
IngestJob_progress_analysisResultIngest_displayName=Analyzing analysis results from {0}
# {0} - tier number
# {1} - job state modifier
IngestJobExecutor_progress_snapshot_currentTier=Tier {0} {1}
IngestJobExecutor_progress_snapshot_currentTier_shutDown_modifier=shut down
IngestJobSettingsPanel.IngestModulesTableRenderer.info.message=A previous version of this ingest module has been run before on this data source.
IngestJobSettingsPanel.IngestModulesTableRenderer.warning.message=This ingest module has been run before on this data source.
IngestJobSettingsPanel.noPerRunSettings=The selected module has no per-run settings.
@ -102,6 +106,7 @@ IngestProgressSnapshotPanel.SnapshotsTableModel.colNames.jobID=Job ID
IngestJobTableModel.colName.jobID=Job ID
IngestJobTableModel.colName.dataSource=Data Source
IngestJobTableModel.colName.start=Start
IngestJobTableModel.colName.tier=Tier
IngestJobTableModel.colName.numProcessed=Files Processed
IngestJobTableModel.colName.filesPerSec=Files/Sec
IngestJobTableModel.colName.inProgress=In Progress
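
The new tier key follows NbBundle's MessageFormat conventions: {0} receives the tier number and {1} the job state modifier. A runnable sketch of the substitution, with the pattern copied from the bundle and example arguments:

import java.text.MessageFormat;

class TierMessageSketch {

    public static void main(String[] args) {
        String pattern = "Tier {0} {1}"; // IngestJobExecutor_progress_snapshot_currentTier
        // Prints "Tier 1 shut down", combining the tier number with the shut-down modifier.
        System.out.println(MessageFormat.format(pattern, 1, "shut down"));
    }
}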

View File

@ -56,7 +56,7 @@ public class DataSourceIngestModuleProgress {
* @param workUnits Number of work units performed so far by the module.
*/
public void progress(int workUnits) {
ingestJobExecutor.advanceDataSourceIngestProgressBar("", workUnits);
ingestJobExecutor.updateDataSourceIngestProgressBar("", workUnits);
}
/**
@ -65,7 +65,7 @@ public class DataSourceIngestModuleProgress {
* @param message Message to display
*/
public void progress(String message) {
ingestJobExecutor.advanceDataSourceIngestProgressBar(message);
ingestJobExecutor.updateDataSourceIngestProgressBarText(message);
}
/**
@ -76,7 +76,7 @@ public class DataSourceIngestModuleProgress {
* @param workUnits Number of work units performed so far by the module.
*/
public void progress(String currentTask, int workUnits) {
ingestJobExecutor.advanceDataSourceIngestProgressBar(currentTask, workUnits);
ingestJobExecutor.updateDataSourceIngestProgressBar(currentTask, workUnits);
}
}
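
These renames are internal to the executor; module authors still call the same public progress(...) overloads. A hedged sketch of a data source ingest module driving the progress bar (the module class is hypothetical; the interfaces are Autopsy's public ingest API):

import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.Content;

// Hypothetical module for illustration; not part of this commit.
class ExampleDataSourceModule implements DataSourceIngestModule {

    @Override
    public void startUp(IngestJobContext context) {
        // No per-job setup needed for this sketch.
    }

    @Override
    public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        int totalSteps = 10; // illustrative work estimate
        progressBar.switchToDeterminate(totalSteps);
        for (int step = 1; step <= totalSteps; step++) {
            // ... analyze one slice of the data source here ...
            progressBar.progress("Step " + step, step); // ends up in updateDataSourceIngestProgressBar
        }
        return IngestModule.ProcessResult.OK;
    }
}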

View File

@ -88,7 +88,7 @@ final class DataSourceIngestPipeline extends IngestPipeline<DataSourceIngestTask
void process(IngestJobExecutor ingestJobExecutor, DataSourceIngestTask task) throws IngestModuleException {
Content dataSource = task.getDataSource();
String progressBarDisplayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.displayName", getDisplayName(), dataSource.getName());
ingestJobExecutor.updateDataSourceIngestProgressBarDisplayName(progressBarDisplayName);
ingestJobExecutor.changeDataSourceIngestProgressBarTitle(progressBarDisplayName);
ingestJobExecutor.switchDataSourceIngestProgressBarToIndeterminate();
ingestManager.setIngestTaskProgress(task, getDisplayName());
logger.log(Level.INFO, "{0} analysis of {1} starting", new Object[]{getDisplayName(), dataSource.getName()}); //NON-NLS

View File

@ -25,11 +25,12 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.python.google.common.collect.ImmutableList;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource;
/**
 * Analyzes a data source using a set of ingest modules specified via ingest
@ -71,7 +72,7 @@ public final class IngestJob {
private static final Logger logger = Logger.getLogger(IngestJob.class.getName());
private final static AtomicLong nextId = new AtomicLong(0L);
private final long id;
private final Content dataSource;
private final DataSource dataSource;
private final List<AbstractFile> files = new ArrayList<>();
private final Mode ingestMode;
private final IngestJobSettings settings;
@ -88,7 +89,7 @@ public final class IngestJob {
* analyzed.
* @param settings The ingest job settings.
*/
IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
IngestJob(DataSource dataSource, List<AbstractFile> files, IngestJobSettings settings) {
this(dataSource, Mode.BATCH, settings);
this.files.addAll(files);
}
@ -102,7 +103,7 @@ public final class IngestJob {
* @param ingestMode The ingest job mode.
* @param settings The ingest job settings.
*/
IngestJob(Content dataSource, Mode ingestMode, IngestJobSettings settings) {
IngestJob(DataSource dataSource, Mode ingestMode, IngestJobSettings settings) {
id = IngestJob.nextId.getAndIncrement();
this.dataSource = dataSource;
this.settings = settings;
@ -125,10 +126,30 @@ public final class IngestJob {
*
* @return The data source.
*/
Content getDataSource() {
DataSource getDataSource() {
return dataSource;
}
/**
* Gets the subset of files from the data source to be analyzed for this
* job.
*
* @return The subset of files or an empty list if all the files in the data
 * source should be analyzed.
*/
List<AbstractFile> getFiles() {
return ImmutableList.copyOf(files);
}
/**
* Gets the ingest job settings.
*
* @return The settings.
*/
IngestJobSettings getSettings() {
return settings;
}
/**
* Checks to see if this ingest job has at least one non-empty ingest module
* pipeline.
@ -198,6 +219,10 @@ public final class IngestJob {
* scheduling the ingest tasks that make up the job.
*
* @return A collection of ingest module start up errors, empty on success.
*
* @throws InterruptedException The exception is thrown if the current
* thread is interrupted during the start up
* process.
*/
synchronized List<IngestModuleError> start() throws InterruptedException {
if (ingestModuleExecutor != null) {
@ -205,7 +230,7 @@ public final class IngestJob {
return Collections.emptyList();
}
ingestModuleExecutor = new IngestJobExecutor(this, dataSource, files, settings);
ingestModuleExecutor = new IngestJobExecutor(this);
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(ingestModuleExecutor.startUp());
if (errors.isEmpty()) {
@ -255,10 +280,10 @@ public final class IngestJob {
*
* @return The snapshot, will be null if the job is not started yet.
*/
Snapshot getDiagnosticStatsSnapshot() {
Snapshot snapshot = null;
IngestJobProgressSnapshot getDiagnosticStatsSnapshot() {
IngestJobProgressSnapshot snapshot = null;
if (ingestModuleExecutor != null) {
snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(true);
snapshot = ingestModuleExecutor.getIngestJobProgressSnapshot(true);
}
return snapshot;
}
@ -353,7 +378,7 @@ public final class IngestJob {
*/
public final class DataSourceProcessingSnapshot {
private final Snapshot snapshot;
private final IngestJobProgressSnapshot snapshot;
/**
* Constructs a snapshot of some basic diagnostic statistics for an
@ -362,7 +387,7 @@ public final class IngestJob {
* of multiple data sources, each of which had its own basic
* diagnostic statistics snapshot.
*/
private DataSourceProcessingSnapshot(Snapshot snapshot) {
private DataSourceProcessingSnapshot(IngestJobProgressSnapshot snapshot) {
this.snapshot = snapshot;
}
@ -445,7 +470,7 @@ public final class IngestJob {
* stats part of the snapshot.
*/
private ProgressSnapshot(boolean includeIngestTasksSnapshot) {
Snapshot snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot);
IngestJobProgressSnapshot snapshot = ingestModuleExecutor.getIngestJobProgressSnapshot(includeIngestTasksSnapshot);
dataSourceProcessingSnapshot = new DataSourceProcessingSnapshot(snapshot);
jobCancellationRequested = IngestJob.this.isCancelled();
jobCancellationReason = IngestJob.this.getCancellationReason();
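
getFiles() above returns ImmutableList.copyOf(files) so callers cannot mutate the job's internal list. A dependency-free sketch of the same defensive-copy contract using List.copyOf (Java 10+):

import java.util.ArrayList;
import java.util.List;

class FileListHolderSketch {

    private final List<String> files = new ArrayList<>();

    // Returns an unmodifiable snapshot; mutating the returned list throws UnsupportedOperationException.
    List<String> getFiles() {
        return List.copyOf(files);
    }
}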

File diff suppressed because it is too large

View File

@ -24,9 +24,9 @@ import java.util.Date;
import java.util.List;
/**
* Stores basic diagnostic statistics for an ingest job.
* A snapshot of the progress of an ingest job.
*/
public final class Snapshot implements Serializable {
public final class IngestJobProgressSnapshot implements Serializable {
private static final long serialVersionUID = 1L;
@ -34,6 +34,7 @@ public final class Snapshot implements Serializable {
private final long jobId;
private final long jobStartTime;
private final long snapShotTime;
private final String currentIngestModuleTier;
transient private final DataSourceIngestPipeline.DataSourcePipelineModule dataSourceLevelIngestModule;
private final boolean fileIngestRunning;
private final Date fileIngestStartTime;
@ -45,25 +46,33 @@ public final class Snapshot implements Serializable {
transient private final List<String> cancelledDataSourceModules;
/**
* Constructs an object to store basic diagnostic statistics for an ingest
* job.
* Constructs a snapshot of the progress of an ingest job.
*/
Snapshot(String dataSourceName, long jobId, long jobStartTime, DataSourceIngestPipeline.DataSourcePipelineModule dataSourceIngestModule,
boolean fileIngestRunning, Date fileIngestStartTime,
boolean jobCancelled, IngestJob.CancellationReason cancellationReason, List<String> cancelledModules,
long processedFiles, long estimatedFilesToProcess,
long snapshotTime, IngestTasksScheduler.IngestTasksSnapshot tasksSnapshot) {
IngestJobProgressSnapshot(
String dataSourceName,
long jobId,
long jobStartTime,
String currentIngestModuleTier,
DataSourceIngestPipeline.DataSourcePipelineModule dataSourceIngestModule,
boolean fileIngestRunning,
Date fileIngestStartTime,
boolean jobCancelled,
IngestJob.CancellationReason cancellationReason,
List<String> cancelledModules,
long processedFiles,
long estimatedFilesToProcess,
long snapshotTime,
IngestTasksScheduler.IngestTasksSnapshot tasksSnapshot) {
this.dataSource = dataSourceName;
this.jobId = jobId;
this.jobStartTime = jobStartTime;
this.currentIngestModuleTier = currentIngestModuleTier;
this.dataSourceLevelIngestModule = dataSourceIngestModule;
this.fileIngestRunning = fileIngestRunning;
this.fileIngestStartTime = fileIngestStartTime;
this.jobCancelled = jobCancelled;
this.jobCancellationReason = cancellationReason;
this.cancelledDataSourceModules = cancelledModules;
this.processedFiles = processedFiles;
this.estimatedFilesToProcess = estimatedFilesToProcess;
this.snapShotTime = snapshotTime;
@ -71,7 +80,7 @@ public final class Snapshot implements Serializable {
}
/**
* Gets time these statistics were collected.
* Gets the time this snapshot was taken.
*
 * @return The statistics collection time as the number of milliseconds since
* January 1, 1970, 00:00:00 GMT.
@ -81,18 +90,16 @@ public final class Snapshot implements Serializable {
}
/**
* Gets the name of the data source associated with the ingest job that is
* the subject of this snapshot.
* Gets the name of the data source for the ingest job.
*
* @return A data source name string.
* @return The data source name.
*/
String getDataSource() {
return dataSource;
}
/**
* Gets the identifier of the ingest job that is the subject of this
* snapshot.
* Gets the identifier of the ingest job.
*
* @return The ingest job id.
*/
@ -110,68 +117,82 @@ public final class Snapshot implements Serializable {
return jobStartTime;
}
/**
* Get the current ingest module tier.
*
* @return The current ingest module tier.
*/
String getCurrentIngestModuleTier() {
return currentIngestModuleTier;
}
/**
* Gets a handle to the currently running data source level ingest module at
* the time this snapshot was taken.
*
* @return The data source ingest module handle, may be null.
*/
DataSourceIngestPipeline.DataSourcePipelineModule getDataSourceLevelIngestModule() {
return this.dataSourceLevelIngestModule;
}
/**
* Gets whether or not file level analysis was in progress at the time this
* snapshot was taken.
*
* @return True or false.
*/
boolean getFileIngestIsRunning() {
return this.fileIngestRunning;
}
/**
* Gets the time that file level analysis was started.
*
* @return The start time.
*/
// RJCTODO: How is this affected by ingest module tiers?
Date getFileIngestStartTime() {
return new Date(fileIngestStartTime.getTime());
}
/**
* Gets files per second throughput since the ingest job that is the subject
* of this snapshot started.
* Gets files per second throughput since the ingest job started.
*
* @return Files processed per second (approximate).
*/
double getSpeed() {
// RJCTODO: How is this affected by ingest module tiers?
double getFilesProcessedPerSec() {
return (double) processedFiles / ((snapShotTime - jobStartTime) / 1000);
}
/**
* Gets the number of files processed for the job so far.
* Gets the total number of files processed so far.
*
* @return The number of processed files.
*/
// RJCTODO: How is this affected by ingest module tiers?
long getFilesProcessed() {
return processedFiles;
}
/**
* Gets an estimate of the files that still need to be processed for this
* job.
 * Gets an estimate of the total number of files that need to be processed.
*
* @return The estimate.
*/
// RJCTODO: How is this affected by ingest module tiers?
long getFilesEstimated() {
return estimatedFilesToProcess;
}
long getRootQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
}
return this.tasksSnapshot.getRootQueueSize();
}
long getDirQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
}
return this.tasksSnapshot.getDirQueueSize();
}
long getFileQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
}
return this.tasksSnapshot.getFileQueueSize();
}
/**
* Gets the number of data source level ingest tasks for the ingest job that
* are currently in the data source ingest thread queue of the ingest tasks
* scheduler.
*
* @return The number of data source ingest tasks.
*/
long getDsQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
@ -179,6 +200,38 @@ public final class Snapshot implements Serializable {
return this.tasksSnapshot.getDataSourceQueueSize();
}
/**
* Gets the number of file ingest tasks for the ingest job that are
* currently in the root level queue of the ingest tasks scheduler.
*
* @return The number of file ingest tasks.
*/
long getRootQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
}
return this.tasksSnapshot.getRootQueueSize();
}
/**
* Gets the number of file ingest tasks for the ingest job that are
* currently in the directory level queue of the ingest tasks scheduler.
*
* @return The number of file ingest tasks.
*/
long getDirQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
}
return this.tasksSnapshot.getDirQueueSize();
}
/**
* Gets the number of file ingest tasks for the ingest job that are
* currently in the streamed files queue of the ingest tasks scheduler.
*
* @return The number of file ingest tasks.
*/
long getStreamingQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
@ -186,6 +239,53 @@ public final class Snapshot implements Serializable {
return this.tasksSnapshot.getStreamedFilesQueueSize();
}
/**
* Gets the number of file ingest tasks for the ingest job that are
* currently in the file ingest threads queue of the ingest tasks scheduler.
*
* @return The number of file ingest tasks.
*/
long getFileQueueSize() {
if (null == this.tasksSnapshot) {
return 0;
}
return this.tasksSnapshot.getFileQueueSize();
}
/**
* Gets the number of data artifact ingest tasks for the ingest job that are
* currently in the data artifact ingest thread queue of the ingest tasks
* scheduler.
*
* @return The number of data artifact ingest tasks.
*/
long getDataArtifactTasksQueueSize() {
if (tasksSnapshot == null) {
return 0;
}
return tasksSnapshot.getArtifactsQueueSize();
}
/**
* Gets the number of analysis result ingest tasks for the ingest job that
* are currently in the analysis result ingest thread queue of the ingest
* tasks scheduler.
*
* @return The number of analysis result ingest tasks.
*/
long getAnalysisResultTasksQueueSize() {
if (tasksSnapshot == null) {
return 0;
}
return tasksSnapshot.getResultsQueueSize();
}
/**
* Gets the number of ingest tasks for the ingest job that are currently in
* the tasks in progress list of the ingest tasks scheduler.
*
 * @return The number of ingest tasks.
*/
long getRunningListSize() {
if (null == this.tasksSnapshot) {
return 0;
@ -193,26 +293,17 @@ public final class Snapshot implements Serializable {
return this.tasksSnapshot.getProgressListSize();
}
long getArtifactTasksQueueSize() {
if (tasksSnapshot == null) {
return 0;
}
return tasksSnapshot.getArtifactsQueueSize();
}
long getResultTasksQueueSize() {
if (tasksSnapshot == null) {
return 0;
}
return tasksSnapshot.getResultsQueueSize();
}
/**
* Gets whether or not the job has been cancelled.
*
* @return True or false.
*/
boolean isCancelled() {
return this.jobCancelled;
}
/**
* Gets the reason this job was cancelled.
* Gets the reason the job was cancelled.
*
* @return The cancellation reason, may be not cancelled.
*/
@ -222,7 +313,7 @@ public final class Snapshot implements Serializable {
/**
* Gets a list of the display names of any canceled data source level ingest
* modules
* modules.
*
* @return A list of canceled data source level ingest module display names,
* possibly empty.
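
One detail of getFilesProcessedPerSec() worth noting: the divisor (snapShotTime - jobStartTime) / 1000 is integer division, so a snapshot taken in the job's first second divides a double numerator by zero and yields Infinity (or NaN for 0/0) rather than throwing. A guarded variant, as a sketch mirroring the snapshot's fields:

class ThroughputSketch {

    // Files/sec with a guard for sub-second snapshots; parameters mirror the snapshot fields.
    static double filesPerSecond(long processedFiles, long jobStartTimeMs, long snapshotTimeMs) {
        double elapsedSeconds = (snapshotTimeMs - jobStartTimeMs) / 1000.0;
        if (elapsedSeconds <= 0) {
            return 0.0; // avoids Infinity/NaN when no whole second has elapsed yet
        }
        return processedFiles / elapsedSeconds;
    }
}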

View File

@ -359,21 +359,30 @@ public class IngestManager implements IngestProgressSnapshotProvider {
* artifact will be analyzed. It also might be analyzed by a
* subsequent ingest job for the data source. This is an
* acceptable edge case.
*
* 5. The user can manually run ad hoc keyword searches,
* which post TSK_KEYWORD_HIT analysis results. Ingest
* of that data source might be running, in which case the analysis
* results will be analyzed. They also might be analyzed by a
* subsequent ingest job for the data source. This is an
* acceptable edge case.
*/
DataArtifact dataArtifact = newDataArtifacts.get(0);
try {
Content artifactDataSource = dataArtifact.getDataSource();
synchronized (ingestJobsById) {
for (IngestJob job : ingestJobsById.values()) {
Content dataSource = job.getDataSource();
if (artifactDataSource.getId() == dataSource.getId()) {
ingestJob = job;
break;
BlackboardArtifact artifact = newArtifacts.iterator().next();
if (artifact != null) {
try {
Content artifactDataSource = artifact.getDataSource();
synchronized (ingestJobsById) {
for (IngestJob job : ingestJobsById.values()) {
Content dataSource = job.getDataSource();
if (artifactDataSource.getId() == dataSource.getId()) {
ingestJob = job;
break;
}
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to get data source for blackboard artifact (object ID = %d)", artifact.getId()), ex); //NON-NLS
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to get data source for data artifact (object ID = %d)", dataArtifact.getId()), ex); //NON-NLS
}
}
if (ingestJob != null) {
@ -431,6 +440,9 @@ public class IngestManager implements IngestProgressSnapshotProvider {
* @throws TskCoreException if there was an error starting the ingest job.
*/
public IngestStream openIngestStream(DataSource dataSource, IngestJobSettings settings) throws TskCoreException {
if (!(dataSource instanceof DataSource)) {
throw new IllegalArgumentException("dataSource argument does not implement the DataSource interface"); //NON-NLS
}
IngestJob job = new IngestJob(dataSource, IngestJob.Mode.STREAMING, settings);
IngestJobInputStream stream = new IngestJobInputStream(job);
if (stream.getIngestJobStartResult().getJob() != null) {
@ -480,8 +492,11 @@ public class IngestManager implements IngestProgressSnapshotProvider {
* @param settings The settings for the ingest job.
*/
public void queueIngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
if (!(dataSource instanceof DataSource)) {
throw new IllegalArgumentException("dataSource argument does not implement the DataSource interface"); //NON-NLS
}
if (caseIsOpen) {
IngestJob job = new IngestJob(dataSource, files, settings);
IngestJob job = new IngestJob((DataSource) dataSource, files, settings);
if (job.hasIngestPipeline()) {
long taskId = nextIngestManagerTaskId.incrementAndGet();
Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
@ -507,9 +522,17 @@ public class IngestManager implements IngestProgressSnapshotProvider {
* attempting to start the ingest jobs.
*/
public IngestJobStartResult beginIngestJob(Collection<Content> dataSources, IngestJobSettings settings) {
List<DataSource> verifiedDataSources = new ArrayList<>();
for (Content content : dataSources) {
if (!(content instanceof DataSource)) {
throw new IllegalArgumentException("Content object in dataSources argument does not implement the DataSource interface"); //NON-NLS
}
DataSource verifiedDataSource = (DataSource) content;
verifiedDataSources.add(verifiedDataSource);
}
IngestJobStartResult startResult = null;
if (caseIsOpen) {
for (Content dataSource : dataSources) {
for (DataSource dataSource : verifiedDataSources) {
List<IngestJob> startedJobs = new ArrayList<>();
IngestJob job = new IngestJob(dataSource, IngestJob.Mode.BATCH, settings);
if (job.hasIngestPipeline()) {
@ -599,7 +622,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
synchronized (ingestJobsById) {
ingestJobsById.put(job.getId(), job);
}
IngestManager.logger.log(Level.INFO, "Starting ingest job {0}", job.getId()); //NON-NLS
IngestManager.logger.log(Level.INFO, String.format("Starting ingest job %d at %s", job.getId(), new Date().getTime())); //NON-NLS
try {
errors = job.start();
} catch (InterruptedException ex) {
@ -647,10 +670,10 @@ public class IngestManager implements IngestProgressSnapshotProvider {
ingestJobsById.remove(jobId);
}
if (!job.isCancelled()) {
IngestManager.logger.log(Level.INFO, "Ingest job {0} completed", jobId); //NON-NLS
IngestManager.logger.log(Level.INFO, String.format("Ingest job %d completed at %s", job.getId(), new Date().getTime())); //NON-NLS
fireIngestJobCompleted(jobId);
} else {
IngestManager.logger.log(Level.INFO, "Ingest job {0} cancelled", jobId); //NON-NLS
IngestManager.logger.log(Level.INFO, String.format("Ingest job %d cancelled at %s", job.getId(), new Date().getTime())); //NON-NLS
fireIngestJobCancelled(jobId);
}
}
@ -993,11 +1016,11 @@ public class IngestManager implements IngestProgressSnapshotProvider {
* @return A list of ingest job state snapshots.
*/
@Override
public List<Snapshot> getIngestJobSnapshots() {
List<Snapshot> snapShots = new ArrayList<>();
public List<IngestJobProgressSnapshot> getIngestJobSnapshots() {
List<IngestJobProgressSnapshot> snapShots = new ArrayList<>();
synchronized (ingestJobsById) {
ingestJobsById.values().forEach((job) -> {
Snapshot snapshot = job.getDiagnosticStatsSnapshot();
IngestJobProgressSnapshot snapshot = job.getDiagnosticStatsSnapshot();
if (snapshot != null) {
snapShots.add(snapshot);
}
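
beginIngestJob above validates every Content up front before any job starts. The same verify-and-cast step expressed with streams, as an equivalent sketch:

import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;

class DataSourceVerificationSketch {

    // Fails fast on the first non-DataSource element, otherwise casts the whole collection.
    static List<DataSource> verify(Collection<Content> dataSources) {
        return dataSources.stream()
                .map(content -> {
                    if (!(content instanceof DataSource)) {
                        throw new IllegalArgumentException(
                                "Content object in dataSources argument does not implement the DataSource interface");
                    }
                    return (DataSource) content;
                })
                .collect(Collectors.toList());
    }
}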

View File

@ -0,0 +1,176 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.LinkedBlockingQueue;
import org.python.google.common.collect.ImmutableList;
/**
* A set of ingest module pipelines grouped into a tier for concurrent analysis
* during an ingest job.
*/
class IngestModuleTier {
private DataSourceIngestPipeline dataSourceIngestPipeline;
private final LinkedBlockingQueue<FileIngestPipeline> fileIngestPipelinesQueue = new LinkedBlockingQueue<>();
private final List<FileIngestPipeline> fileIngestPipelines = new ArrayList<>();
private DataArtifactIngestPipeline dataArtifactIngestPipeline;
private AnalysisResultIngestPipeline analysisResultIngestPipeline;
/**
* Sets the data source ingest pipeline for this tier, if there is one.
*
* @param pipeline The pipeline.
*/
void setDataSourceIngestPipeline(DataSourceIngestPipeline pipeline) {
dataSourceIngestPipeline = pipeline;
}
/**
* Checks to see if there is at least one data source level ingest module in
* this tier.
*
* @return True or false.
*/
boolean hasDataSourceIngestModules() {
return (dataSourceIngestPipeline != null && dataSourceIngestPipeline.isEmpty() == false);
}
/**
* Gets the data source ingest pipeline for this tier, if there is one.
*
* @return The pipeline, in Optional form.
*/
Optional<DataSourceIngestPipeline> getDataSourceIngestPipeline() {
return Optional.ofNullable(dataSourceIngestPipeline);
}
/**
* Sets the file ingest pipelines for this tier, if there are any. All of
* the pipelines should be identical copies, and the number of pipeline
* copies should match the number of file ingest threads in the ingest
* manager.
*
* @param pipelines The pipelines.
*
* @throws InterruptedException The exception is thrown if the current
* thread is interrupted while blocked waiting
* for the pipelines to be added to an internal
* data structure.
*/
void setsFileIngestPipelines(List<FileIngestPipeline> pipelines) throws InterruptedException {
fileIngestPipelines.addAll(pipelines);
for (FileIngestPipeline pipeline : pipelines) {
fileIngestPipelinesQueue.put(pipeline);
}
}
/**
* Checks to see if there is at least one file ingest module in this tier.
*
* @return True or false.
*/
boolean hasFileIngestModules() {
return (!fileIngestPipelines.isEmpty() && !fileIngestPipelines.get(0).isEmpty());
}
/**
* Gets all of the file ingest pipeline copies.
*
* @return The pipeline copies, may be an empty list.
*/
List<FileIngestPipeline> getFileIngestPipelines() {
return ImmutableList.copyOf(fileIngestPipelines);
}
/**
* Gets the next available file ingest pipeline copy for this tier, blocking
* until one becomes available.
*
* @return The pipeline.
*
* @throws InterruptedException The exception is thrown if the current
* thread is interrupted while blocked waiting
* for the next available file ingest pipeline.
*/
FileIngestPipeline takeFileIngestPipeline() throws InterruptedException {
return fileIngestPipelinesQueue.take();
}
/**
* Returns a file ingest pipeline.
*
* @param pipeline The pipeline.
*
* @throws InterruptedException The exception is thrown if the current
* thread is interrupted while blocked waiting
 * for the pipeline to be stored in an internal
* data structure.
*/
void returnFileIngestPipeleine(FileIngestPipeline pipeline) throws InterruptedException {
fileIngestPipelinesQueue.put(pipeline);
}
/**
* Sets the data artifact ingest pipeline for this tier, if there is one.
*
* @param pipeline The pipeline.
*/
void setDataArtifactIngestPipeline(DataArtifactIngestPipeline pipeline) {
dataArtifactIngestPipeline = pipeline;
}
/**
* Checks to see if there is at least one data artifact ingest module in
* this tier.
*
* @return True or false.
*/
boolean hasDataArtifactIngestModules() {
return (dataArtifactIngestPipeline != null && dataArtifactIngestPipeline.isEmpty() == false);
}
/**
* Gets the data artifact ingest pipeline for this tier, if there is one.
*
* @return The pipeline, in Optional form.
*/
Optional<DataArtifactIngestPipeline> getDataArtifactIngestPipeline() {
return Optional.ofNullable(dataArtifactIngestPipeline);
}
/**
* Sets the analysis result ingest pipeline for this tier, if there is one.
*
* @param pipeline The pipeline.
*/
void setAnalysisResultIngestPipeline(AnalysisResultIngestPipeline pipeline) {
analysisResultIngestPipeline = pipeline;
}
/**
* Checks to see if there is at least one analysis result ingest module in
* this tier.
*
* @return True or false.
*/
boolean hasAnalysisResultIngestModules() {
return (analysisResultIngestPipeline != null && analysisResultIngestPipeline.isEmpty() == false);
}
/**
* Gets the analysis result ingest pipeline for this tier, if there is one.
*
* @return The pipeline, in Optional form.
*/
Optional<AnalysisResultIngestPipeline> getAnalysisResultIngestPipeline() {
return Optional.ofNullable(analysisResultIngestPipeline);
}
}
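
fileIngestPipelinesQueue above is a blocking pool: take() parks a file ingest thread until a pipeline copy is free, and put() hands it back. The idiom in isolation, sketched with a generic resource type:

import java.util.concurrent.LinkedBlockingQueue;

// Generic stand-in for the pipeline pool; R would be FileIngestPipeline in the tier.
class BlockingPoolSketch<R> {

    private final LinkedBlockingQueue<R> available = new LinkedBlockingQueue<>();

    void add(R resource) throws InterruptedException {
        available.put(resource);
    }

    // Blocks until another thread returns a resource to the pool.
    R borrow() throws InterruptedException {
        return available.take();
    }

    void giveBack(R resource) throws InterruptedException {
        available.put(resource);
    }
}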

View File

@ -0,0 +1,240 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
/**
* A utility that builds the ingest module tiers needed to execute an ingest
* job.
*/
class IngestModuleTierBuilder {
private static final String AUTOPSY_MODULE_PREFIX = "org.sleuthkit.autopsy";
private static final Pattern JYTHON_MODULE_REGEX = Pattern.compile("org\\.python\\.proxies\\.(.+?)\\$(.+?)(\\$[0-9]*)?$");
/**
* Builds the ingest module tiers needed to execute an ingest job.
*
* @param settings The ingest job settings.
* @param executor The ingest job executor.
*
* @return The ingest module tiers.
*
* @throws InterruptedException The exception is thrown if the current
* thread is interrupted while blocked during
* the building process.
*/
static List<IngestModuleTier> buildIngestModuleTiers(IngestJobSettings settings, IngestJobExecutor executor) throws InterruptedException {
/*
* Get the enabled ingest module templates from the ingest job settings.
*/
List<IngestModuleTemplate> enabledTemplates = settings.getEnabledIngestModuleTemplates();
/**
* Sort the ingest module templates into buckets based on the module
* types the template can be used to create. A template may go into more
* than one bucket. Each bucket actually consists of two collections:
* one for Java modules and one for Jython modules.
*/
Map<String, IngestModuleTemplate> javaDataSourceModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> jythonDataSourceModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> javaFileModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> jythonFileModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> javaArtifactModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> jythonArtifactModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> javaResultModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> jythonResultModuleTemplates = new LinkedHashMap<>();
for (IngestModuleTemplate template : enabledTemplates) {
if (template.isDataSourceIngestModuleTemplate()) {
addModuleTemplateToSortingMap(javaDataSourceModuleTemplates, jythonDataSourceModuleTemplates, template);
}
if (template.isFileIngestModuleTemplate()) {
addModuleTemplateToSortingMap(javaFileModuleTemplates, jythonFileModuleTemplates, template);
}
if (template.isDataArtifactIngestModuleTemplate()) {
addModuleTemplateToSortingMap(javaArtifactModuleTemplates, jythonArtifactModuleTemplates, template);
}
if (template.isAnalysisResultIngestModuleTemplate()) {
addModuleTemplateToSortingMap(javaResultModuleTemplates, jythonResultModuleTemplates, template);
}
}
/**
* Take the module templates that have pipeline configuration entries
* out of the buckets, and add them to ingest module pipeline templates
* in the order prescribed by the pipeline configuration. There is
* currently no pipeline configuration file support for data artifact or
* analysis result ingest module pipelines.
*/
IngestPipelinesConfiguration pipelineConfig = IngestPipelinesConfiguration.getInstance();
List<IngestModuleTemplate> firstStageDataSourcePipelineTemplate = createIngestPipelineTemplate(javaDataSourceModuleTemplates, jythonDataSourceModuleTemplates, pipelineConfig.getStageOneDataSourceIngestPipelineConfig());
List<IngestModuleTemplate> secondStageDataSourcePipelineTemplate = createIngestPipelineTemplate(javaDataSourceModuleTemplates, jythonDataSourceModuleTemplates, pipelineConfig.getStageTwoDataSourceIngestPipelineConfig());
List<IngestModuleTemplate> filePipelineTemplate = createIngestPipelineTemplate(javaFileModuleTemplates, jythonFileModuleTemplates, pipelineConfig.getFileIngestPipelineConfig());
List<IngestModuleTemplate> artifactPipelineTemplate = new ArrayList<>();
List<IngestModuleTemplate> resultsPipelineTemplate = new ArrayList<>();
/**
* Add any ingest module templates remaining in the buckets to the
* appropriate ingest module pipeline templates. Data source level
* ingest modules templates that were not listed in the pipeline
* configuration are added to the first stage data source pipeline
* template, Java modules are added before Jython modules, and Core
* Autopsy modules are added before third party modules.
*/
addToIngestPipelineTemplate(firstStageDataSourcePipelineTemplate, javaDataSourceModuleTemplates, jythonDataSourceModuleTemplates);
addToIngestPipelineTemplate(filePipelineTemplate, javaFileModuleTemplates, jythonFileModuleTemplates);
addToIngestPipelineTemplate(artifactPipelineTemplate, javaArtifactModuleTemplates, jythonArtifactModuleTemplates);
addToIngestPipelineTemplate(resultsPipelineTemplate, javaResultModuleTemplates, jythonResultModuleTemplates);
/**
* Construct the ingest module pipelines from the ingest module pipeline
* templates and populate the ingest module tiers.
*/
List<IngestModuleTier> moduleTiers = new ArrayList<>();
IngestModuleTier firstTier = new IngestModuleTier();
int numberOfFileIngestThreads = IngestManager.getInstance().getNumberOfFileIngestThreads();
List<FileIngestPipeline> fileIngestPipelines = new ArrayList<>();
for (int i = 0; i < numberOfFileIngestThreads; ++i) {
fileIngestPipelines.add(new FileIngestPipeline(executor, filePipelineTemplate));
}
firstTier.setsFileIngestPipelines(fileIngestPipelines);
firstTier.setDataSourceIngestPipeline(new DataSourceIngestPipeline(executor, firstStageDataSourcePipelineTemplate));
firstTier.setDataArtifactIngestPipeline(new DataArtifactIngestPipeline(executor, artifactPipelineTemplate));
firstTier.setAnalysisResultIngestPipeline(new AnalysisResultIngestPipeline(executor, resultsPipelineTemplate));
moduleTiers.add(firstTier);
IngestModuleTier secondTier = new IngestModuleTier();
secondTier.setDataSourceIngestPipeline(new DataSourceIngestPipeline(executor, secondStageDataSourcePipelineTemplate));
// RJCTODO: Remove test
// List<FileIngestPipeline> fileIngestPipelines2 = new ArrayList<>();
// for (int i = 0; i < numberOfFileIngestThreads; ++i) {
// fileIngestPipelines2.add(new FileIngestPipeline(executor, filePipelineTemplate));
// }
// secondTier.setsFileIngestPipelines(fileIngestPipelines2);
moduleTiers.add(secondTier);
return moduleTiers;
}
/**
* Adds an ingest module template to one of two mappings of ingest module
* factory class names to module templates. One mapping is for ingest
 * modules implemented using Java, and the other is for ingest modules
* implemented using Jython.
*
* @param mapping Mapping for Java ingest module templates.
* @param jythonMapping Mapping for Jython ingest module templates.
* @param template The ingest module template.
*/
private static void addModuleTemplateToSortingMap(Map<String, IngestModuleTemplate> mapping, Map<String, IngestModuleTemplate> jythonMapping, IngestModuleTemplate template) {
String className = template.getModuleFactory().getClass().getCanonicalName();
String jythonName = getModuleNameFromJythonClassName(className);
if (jythonName != null) {
jythonMapping.put(jythonName, template);
} else {
mapping.put(className, template);
}
}
/**
* Extracts a module class name from a Jython module proxy class name. For
 * example, a Jython class name such as
 * "org.python.proxies.GPX_Parser_Module$GPXParserFileIngestModuleFactory$14"
 * will be parsed to return
 * "GPX_Parser_Module.GPXParserFileIngestModuleFactory".
*
* @param className The canonical class name.
*
 * @return The extracted module name, or null if the extraction fails.
*/
private static String getModuleNameFromJythonClassName(String className) {
Matcher m = JYTHON_MODULE_REGEX.matcher(className);
if (m.find()) {
return String.format("%s.%s", m.group(1), m.group(2)); //NON-NLS
} else {
return null;
}
}
/**
* Creates an ingest module pipeline template that can be used to construct
* an ingest module pipeline.
*
* @param javaIngestModuleTemplates Ingest module templates for ingest
* modules implemented using Java.
* @param jythonIngestModuleTemplates Ingest module templates for ingest
* modules implemented using Jython.
* @param pipelineConfig An ordered list of the ingest modules
* that belong in the ingest pipeline for
* which the template is being created.
*
* @return An ordered list of ingest module templates, i.e., a template for
* creating ingest module pipelines.
*/
private static List<IngestModuleTemplate> createIngestPipelineTemplate(Map<String, IngestModuleTemplate> javaIngestModuleTemplates, Map<String, IngestModuleTemplate> jythonIngestModuleTemplates, List<String> pipelineConfig) {
List<IngestModuleTemplate> pipelineTemplate = new ArrayList<>();
for (String moduleClassName : pipelineConfig) {
if (javaIngestModuleTemplates.containsKey(moduleClassName)) {
pipelineTemplate.add(javaIngestModuleTemplates.remove(moduleClassName));
} else if (jythonIngestModuleTemplates.containsKey(moduleClassName)) {
pipelineTemplate.add(jythonIngestModuleTemplates.remove(moduleClassName));
}
}
return pipelineTemplate;
}
/**
* Sorts ingest module templates so that core Autopsy ingest modules come
* before third party ingest modules, and ingest modules implemented using
* Java come before ingest modules implemented using Jython.
*
* @param sortedModules The output list to hold the sorted modules.
* @param javaModules The input ingest module templates for modules
* implemented using Java.
* @param jythonModules The ingest module templates for modules implemented
* using Jython.
*/
private static void addToIngestPipelineTemplate(final List<IngestModuleTemplate> sortedModules, final Map<String, IngestModuleTemplate> javaModules, final Map<String, IngestModuleTemplate> jythonModules) {
final List<IngestModuleTemplate> autopsyModules = new ArrayList<>();
final List<IngestModuleTemplate> thirdPartyModules = new ArrayList<>();
Stream.concat(javaModules.entrySet().stream(), jythonModules.entrySet().stream()).forEach((templateEntry) -> {
if (templateEntry.getKey().startsWith(AUTOPSY_MODULE_PREFIX)) {
autopsyModules.add(templateEntry.getValue());
} else {
thirdPartyModules.add(templateEntry.getValue());
}
});
sortedModules.addAll(autopsyModules);
sortedModules.addAll(thirdPartyModules);
}
/**
 * Private constructor to prevent instantiation of this utility class.
 */
private IngestModuleTierBuilder() {
}
}
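
The Jython name extraction is easy to sanity-check in isolation; this runnable sketch applies the same regular expression to the proxy class name from the javadoc example:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class JythonNameSketch {

    private static final Pattern JYTHON_MODULE_REGEX
            = Pattern.compile("org\\.python\\.proxies\\.(.+?)\\$(.+?)(\\$[0-9]*)?$");

    public static void main(String[] args) {
        String proxyName = "org.python.proxies.GPX_Parser_Module$GPXParserFileIngestModuleFactory$14";
        Matcher m = JYTHON_MODULE_REGEX.matcher(proxyName);
        if (m.find()) {
            // Prints "GPX_Parser_Module.GPXParserFileIngestModuleFactory"
            System.out.println(String.format("%s.%s", m.group(1), m.group(2)));
        }
    }
}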

View File

@ -6,7 +6,7 @@
<Dimension value="[500, 500]"/>
</Property>
<Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[1100, 500]"/>
<Dimension value="[1500, 700]"/>
</Property>
</Properties>
<AuxValues>
@ -19,7 +19,7 @@
<AuxValue name="FormSettings_listenerGenerationStyle" type="java.lang.Integer" value="0"/>
<AuxValue name="FormSettings_variablesLocal" type="java.lang.Boolean" value="false"/>
<AuxValue name="FormSettings_variablesModifier" type="java.lang.Integer" value="2"/>
<AuxValue name="designerSize" type="java.awt.Dimension" value="-84,-19,0,5,115,114,0,18,106,97,118,97,46,97,119,116,46,68,105,109,101,110,115,105,111,110,65,-114,-39,-41,-84,95,68,20,2,0,2,73,0,6,104,101,105,103,104,116,73,0,5,119,105,100,116,104,120,112,0,0,1,-12,0,0,3,-123"/>
<AuxValue name="designerSize" type="java.awt.Dimension" value="-84,-19,0,5,115,114,0,18,106,97,118,97,46,97,119,116,46,68,105,109,101,110,115,105,111,110,65,-114,-39,-41,-84,95,68,20,2,0,2,73,0,6,104,101,105,103,104,116,73,0,5,119,105,100,116,104,120,112,0,0,2,35,0,0,3,-2"/>
</AuxValues>
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
@ -135,5 +135,14 @@
</Component>
</SubComponents>
</Container>
<Container class="javax.swing.JPanel" name="jPanel1">
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="-1" gridY="-1" gridWidth="1" gridHeight="1" fill="0" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="0" anchor="10" weightX="0.0" weightY="0.0"/>
</Constraint>
</Constraints>
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignFlowLayout"/>
</Container>
</SubComponents>
</Form>

View File

@ -171,6 +171,7 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.jobID"),
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.dataSource"),
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.start"),
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.tier"),
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.numProcessed"),
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.filesPerSec"),
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.inProgress"),
@ -182,7 +183,7 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.artifactsQueued"),
NbBundle.getMessage(this.getClass(), "IngestJobTableModel.colName.resultsQueued")};
private List<Snapshot> jobSnapshots;
private List<IngestJobProgressSnapshot> jobSnapshots;
private IngestJobTableModel() {
refresh();
@ -210,7 +211,7 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
@Override
public Object getValueAt(int rowIndex, int columnIndex) {
Snapshot snapShot = jobSnapshots.get(rowIndex);
IngestJobProgressSnapshot snapShot = jobSnapshots.get(rowIndex);
Object cellValue;
switch (columnIndex) {
case 0:
@ -224,34 +225,37 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
cellValue = dateFormat.format(new Date(snapShot.getJobStartTime()));
break;
case 3:
cellValue = snapShot.getFilesProcessed();
cellValue = snapShot.getCurrentIngestModuleTier();
break;
case 4:
cellValue = snapShot.getSpeed();
cellValue = snapShot.getFilesProcessed();
break;
case 5:
cellValue = snapShot.getRunningListSize();
cellValue = snapShot.getFilesProcessedPerSec();
break;
case 6:
cellValue = snapShot.getFileQueueSize();
cellValue = snapShot.getRunningListSize();
break;
case 7:
cellValue = snapShot.getDirQueueSize();
cellValue = snapShot.getFileQueueSize();
break;
case 8:
cellValue = snapShot.getRootQueueSize();
cellValue = snapShot.getDirQueueSize();
break;
case 9:
cellValue = snapShot.getStreamingQueueSize();
cellValue = snapShot.getRootQueueSize();
break;
case 10:
cellValue = snapShot.getDsQueueSize();
cellValue = snapShot.getStreamingQueueSize();
break;
case 11:
cellValue = snapShot.getArtifactTasksQueueSize();
cellValue = snapShot.getDsQueueSize();
break;
case 12:
cellValue = snapShot.getResultTasksQueueSize();
cellValue = snapShot.getDataArtifactTasksQueueSize();
break;
case 13:
cellValue = snapShot.getAnalysisResultTasksQueueSize();
break;
default:
cellValue = null;
@ -376,9 +380,10 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
closeButton = new javax.swing.JButton();
moduleScrollPane = new javax.swing.JScrollPane();
moduleTable = new javax.swing.JTable();
jPanel1 = new javax.swing.JPanel();
setMinimumSize(new java.awt.Dimension(500, 500));
setPreferredSize(new java.awt.Dimension(1100, 500));
setPreferredSize(new java.awt.Dimension(1500, 700));
setLayout(new java.awt.GridBagLayout());
threadActivitySnapshotsTable.setModel(new javax.swing.table.DefaultTableModel(
@ -469,6 +474,7 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(6, 10, 0, 10);
add(moduleScrollPane, gridBagConstraints);
add(jPanel1, new java.awt.GridBagConstraints());
}// </editor-fold>//GEN-END:initComponents
private void closeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_closeButtonActionPerformed
@ -482,6 +488,7 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
}//GEN-LAST:event_refreshButtonActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton closeButton;
private javax.swing.JPanel jPanel1;
private javax.swing.JScrollPane jobScrollPane;
private javax.swing.JTable jobTable;
private javax.swing.JScrollPane moduleScrollPane;

View File

@ -38,7 +38,7 @@ public interface IngestProgressSnapshotProvider {
*
* @return A list of ingest job snapshots.
*/
List<Snapshot> getIngestJobSnapshots();
List<IngestJobProgressSnapshot> getIngestJobSnapshots();
/**
* Gets the cumulative run times for the ingest module.

View File

@ -75,7 +75,8 @@ abstract class BlackboardArtifactDAO extends AbstractDAO {
private static Logger logger = Logger.getLogger(BlackboardArtifactDAO.class.getName());
// GVDTODO there is a different standard for normal attr strings and email attr strings
static final int STRING_LENGTH_MAX = 160;
static final int EMAIL_CONTENT_MAX_LEN = 160;
static final int TOOL_TEXT_MAX_LEN = 512;
static final String ELLIPSIS = "...";
@SuppressWarnings("deprecation")
@ -83,7 +84,8 @@ abstract class BlackboardArtifactDAO extends AbstractDAO {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID(),
BlackboardAttribute.Type.TSK_ASSOCIATED_ARTIFACT.getTypeID(),
BlackboardAttribute.Type.TSK_SET_NAME.getTypeID(),
BlackboardAttribute.Type.TSK_KEYWORD_SEARCH_TYPE.getTypeID()
BlackboardAttribute.Type.TSK_KEYWORD_SEARCH_TYPE.getTypeID(),
BlackboardAttribute.Type.TSK_PATH_ID.getTypeID()
);
static final Set<Integer> HIDDEN_EMAIL_ATTR_TYPES = ImmutableSet.of(
BlackboardAttribute.Type.TSK_DATETIME_SENT.getTypeID(),
@ -155,7 +157,7 @@ abstract class BlackboardArtifactDAO extends AbstractDAO {
for (BlackboardArtifact art : arts) {
Map<BlackboardAttribute.Type, Object> attrs = art.getAttributes().stream()
.filter(attr -> isRenderedAttr(artType, attr.getAttributeType()))
.collect(Collectors.toMap(attr -> attr.getAttributeType(), attr -> getAttrValue(attr), (attr1, attr2) -> attr1));
.collect(Collectors.toMap(attr -> attr.getAttributeType(), attr -> getAttrValue(artType, attr), (attr1, attr2) -> attr1));
artifactAttributes.put(art.getId(), attrs);
}
@ -208,16 +210,18 @@ abstract class BlackboardArtifactDAO extends AbstractDAO {
AbstractFile linkedFile = null;
if (artType.getCategory().equals(BlackboardArtifact.Category.DATA_ARTIFACT)) {
Object linkedId = attrValues.get(BlackboardAttribute.Type.TSK_PATH_ID);
linkedFile = linkedId instanceof Long && ((Long) linkedId) >= 0
? getCase().getAbstractFileById((Long) linkedId)
: null;
// Note that we need to get the attribute from the original artifact since it is not displayed.
if (artifact.getAttribute(BlackboardAttribute.Type.TSK_PATH_ID) != null) {
long linkedId = artifact.getAttribute(BlackboardAttribute.Type.TSK_PATH_ID).getValueLong();
linkedFile = linkedId >= 0
? getCase().getAbstractFileById(linkedId)
: null;
}
}
boolean isTimelineSupported = isTimelineSupported(attrValues.keySet());
rows.add(createRow(artifact, srcContent, linkedFile, isTimelineSupported, cellValues, id));
//rows.add(new AnalysisResultRowDTO(artifact, srcContent, linkedFile, isTimelineSupported, cellValues, id));
}
return new TableData(columnKeys, rows);
@ -238,17 +242,21 @@ abstract class BlackboardArtifactDAO extends AbstractDAO {
}
boolean isRenderedAttr(BlackboardArtifact.Type artType, BlackboardAttribute.Type attrType) {
// JSON attributes are always hidden
if (BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON.equals(attrType.getValueType())) {
return false;
}
if (BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeID() == artType.getTypeID()) {
return !HIDDEN_EMAIL_ATTR_TYPES.contains(attrType.getTypeID());
} else {
return !HIDDEN_ATTR_TYPES.contains(attrType.getTypeID())
&& !BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON.equals(attrType.getValueType());
return !HIDDEN_ATTR_TYPES.contains(attrType.getTypeID());
}
}
private String getTruncated(String str) {
return str.length() > STRING_LENGTH_MAX
? str.substring(0, STRING_LENGTH_MAX) + ELLIPSIS
private String getTruncated(String str, int maxLen) {
return str.length() > maxLen
? str.substring(0, maxLen) + ELLIPSIS
: str;
}
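// For example (editor's sketch, hypothetical values):
// getTruncated("The quick brown fox", 9) -> "The quick..."
// getTruncated("fox", 9) -> "fox"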
@ -349,7 +357,26 @@ abstract class BlackboardArtifactDAO extends AbstractDAO {
// }
// return "";
// }
Object getAttrValue(BlackboardAttribute attr) {
@SuppressWarnings("deprecation")
Object getAttrValue(BlackboardArtifact.Type artType, BlackboardAttribute attr) {
// Handle the special cases
if (artType.equals(BlackboardArtifact.Type.TSK_EMAIL_MSG) &&
attr.getAttributeType().equals(BlackboardAttribute.Type.TSK_EMAIL_CONTENT_PLAIN)) {
return getTruncated(attr.getValueString(), EMAIL_CONTENT_MAX_LEN);
}
/* From BlackboardArtifactNode:
* The truncation of text attributes appears to have been
* motivated by the statement that "RegRipper output would
* often cause the UI to get a black line across it and
* hang if you hovered over large output or selected it."
*/
if ((BlackboardArtifact.ARTIFACT_TYPE.TSK_TOOL_OUTPUT.getTypeID() == artType.getTypeID())
&& attr.getAttributeType().equals(BlackboardAttribute.Type.TSK_TEXT)) {
return getTruncated(attr.getValueString(), TOOL_TEXT_MAX_LEN);
}
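// For example (editor's note): a 600-character TSK_TEXT value on a
// TSK_TOOL_OUTPUT artifact comes back as its first 512 characters plus "...".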
switch (attr.getAttributeType().getValueType()) {
case BYTE:
return attr.getValueBytes();
@ -360,11 +387,12 @@ abstract class BlackboardArtifactDAO extends AbstractDAO {
case INTEGER:
return attr.getValueInt();
case JSON:
return getTruncated(attr.getValueString());
// We shouldn't get here since JSON attributes are not displayed in the table
return attr.getValueString();
case LONG:
return attr.getValueLong();
case STRING:
return getTruncated(attr.getValueString());
return attr.getValueString();
default:
throw new IllegalArgumentException("Unknown attribute type value type: " + attr.getAttributeType().getValueType());
}

View File

@ -19,6 +19,8 @@
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo.BlackboardArtifactNodeSelectionInfo;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
@ -27,6 +29,8 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
public class BlackboardArtifactSearchParam {
private static final String TYPE_ID = "BLACKBOARD_ARTIFACT";
private ChildNodeSelectionInfo nodeSelectionInfo;
/**
* @return The type id for this search parameter.
@ -79,4 +83,12 @@ public class BlackboardArtifactSearchParam {
}
return true;
}
public ChildNodeSelectionInfo getNodeSelectionInfo() {
return nodeSelectionInfo;
}
public void setNodeSelectionInfo(ChildNodeSelectionInfo info) {
nodeSelectionInfo = info;
}
}

View File

@ -37,6 +37,10 @@ BlackboardArtifactDAO.columnKeys.srcFile.name=Source Name
CommAccounts.name.text=Communication Accounts
CommAccountsDAO.fileColumns.noDescription=No Description
DataArtifactDAO_Accounts_displayName=Communication Accounts
DeletedContent.allDelFilter.text=All
DeletedContent.fsDelFilter.text=File System
EmailsDAO_getAccountDisplayName_defaultName=Default
EmailsDAO_getFolderDisplayName_defaultName=Default
FileExtDocumentFilter_html_displayName=HTML
FileExtDocumentFilter_office_displayName=Office
FileExtDocumentFilter_pdf_displayName=PDF

View File

@ -0,0 +1,58 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.openide.util.NbBundle;
/**
* Filters for deleted content
*/
@NbBundle.Messages({"DeletedContent.fsDelFilter.text=File System",
"DeletedContent.allDelFilter.text=All"})
public enum DeletedContentFilter {
/**
* Names are used in SQL queries, so make sure they are SQL friendly.
*/
FS_DELETED_FILTER(0, "FS_DELETED_FILTER", Bundle.DeletedContent_fsDelFilter_text()),
ALL_DELETED_FILTER(1, "ALL_DELETED_FILTER", Bundle.DeletedContent_allDelFilter_text());
private int id;
private String name;
private String displayName;
private DeletedContentFilter(int id, String name, String displayName) {
this.id = id;
this.name = name;
this.displayName = displayName;
}
public String getName() {
return this.name;
}
public int getId() {
return this.id;
}
public String getDisplayName() {
return this.displayName;
}
}

View File

@ -0,0 +1,89 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
/**
* Search params for accessing data about deleted content.
*/
public class DeletedContentSearchParams {
private static final String TYPE_ID = "DELETED_CONTENT";
/**
* @return The type id for this search parameter.
*/
public static String getTypeId() {
return TYPE_ID;
}
private final DeletedContentFilter filter;
private final Long dataSourceId;
/**
* Main constructor.
*
* @param filter The filter (if null, indicates full refresh
* required).
* @param dataSourceId The data source id or null.
*/
public DeletedContentSearchParams(DeletedContentFilter filter, Long dataSourceId) {
this.filter = filter;
this.dataSourceId = dataSourceId;
}
public DeletedContentFilter getFilter() {
return filter;
}
public Long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 71 * hash + Objects.hashCode(this.filter);
hash = 71 * hash + Objects.hashCode(this.dataSourceId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final DeletedContentSearchParams other = (DeletedContentSearchParams) obj;
if (this.filter != other.filter) {
return false;
}
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
return false;
}
return true;
}
}

View File

@ -0,0 +1,83 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* Search parameters for email messages.
*/
public class EmailSearchParams extends DataArtifactSearchParam {
private final String account;
private final String folder;
/**
* Main constructor.
*
* @param dataSourceId The data source id or null if no data source
* filtering should occur.
* @param account The email account.
* @param folder The folder within the email account.
*/
public EmailSearchParams(Long dataSourceId, String account, String folder) {
super(BlackboardArtifact.Type.TSK_EMAIL_MSG, dataSourceId);
this.account = account;
this.folder = folder;
}
public String getAccount() {
return account;
}
public String getFolder() {
return folder;
}
@Override
public int hashCode() {
int hash = 7;
hash = 23 * hash + super.hashCode();
hash = 23 * hash + Objects.hashCode(this.account);
hash = 23 * hash + Objects.hashCode(this.folder);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final EmailSearchParams other = (EmailSearchParams) obj;
if (!Objects.equals(this.account, other.account)) {
return false;
}
if (!Objects.equals(this.folder, other.folder)) {
return false;
}
// Include the parent comparison so that params differing only by artifact
// type or data source id are not treated as equal cache keys.
return super.equals(obj);
}
}

View File

@ -0,0 +1,692 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.openide.util.NbBundle.Messages;
import org.python.icu.text.MessageFormat;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
import org.sleuthkit.autopsy.mainui.datamodel.events.EmailEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
* Provides information to populate the results viewer for data in the
* Emails section.
*/
public class EmailsDAO extends AbstractDAO {
private static final Logger logger = Logger.getLogger(EmailsDAO.class.getName());
private static final int CACHE_SIZE = 15;
private static final long CACHE_DURATION = 2;
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
private static final String PATH_DELIMITER = "/";
private static final String ESCAPE_CHAR = "\\";
private final Cache<SearchParams<EmailSearchParams>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private final TreeCounts<EmailEvent> emailCounts = new TreeCounts<>();
private static EmailsDAO instance = null;
synchronized static EmailsDAO getInstance() {
if (instance == null) {
instance = new EmailsDAO();
}
return instance;
}
SleuthkitCase getCase() throws NoCurrentCaseException {
return Case.getCurrentCaseThrows().getSleuthkitCase();
}
public SearchResultsDTO getEmailMessages(EmailSearchParams searchParams, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (searchParams.getDataSourceId() != null && searchParams.getDataSourceId() <= 0) {
throw new IllegalArgumentException("Data source id must be greater than 0 or null");
} else if ((searchParams.getAccount() == null) != (searchParams.getFolder() == null)) {
throw new IllegalArgumentException(
MessageFormat.format(
"Either folder and account are null or they are both non-null. Received [account: {0}, folder: {1}]",
StringUtils.defaultIfBlank(searchParams.getAccount(), "<null>"),
StringUtils.defaultIfBlank(searchParams.getFolder(), "<null>")));
}
SearchParams<EmailSearchParams> emailSearchParams = new SearchParams<>(searchParams, startItem, maxCount);
return searchParamsCache.get(emailSearchParams, () -> fetchEmailMessageDTOs(emailSearchParams));
}
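// Editor's usage sketch (hypothetical account/folder values): fetch the
// first 50 messages of one folder. Passing null for both account and folder
// is also valid and selects the messages whose path lacks a parsable
// account/folder.
// SearchResultsDTO page = getEmailMessages(
//         new EmailSearchParams(null, "alice@example.com", "Inbox"), 0, 50L);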
/**
* Returns a pair of the email account and folder.
*
* NOTE: Subject to change; see JIRA-8220.
*
* @param art The artifact.
*
* @return The pair of the account and folder; a pair of empty strings if undetermined.
*/
private static Pair<String, String> getAccountAndFolder(BlackboardArtifact art) throws TskCoreException {
BlackboardAttribute pathAttr = art.getAttribute(BlackboardAttribute.Type.TSK_PATH);
if (pathAttr == null) {
return Pair.of("", "");
}
String pathVal = pathAttr.getValueString();
if (pathVal == null) {
return Pair.of("", "");
}
return getPathAccountFolder(pathVal);
}
/**
* Returns a pair of the email account and folder.
*
* NOTE: Subject to change; see JIRA-8220.
*
* @param pathVal The path value.
*
* @return The pair of the account and folder; a pair of empty strings if undetermined.
*/
private static Pair<String, String> getPathAccountFolder(String pathVal) {
String[] pieces = pathVal.split(PATH_DELIMITER);
return pieces.length < 4
? Pair.of("", "")
: Pair.of(pieces[2], pieces[3]);
}
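// A minimal sketch of the parsing above (the sample path is hypothetical):
// "/Email Files/alice@example.com/Inbox/msg0001".split("/") yields
// ["", "Email Files", "alice@example.com", "Inbox", "msg0001"], so the
// account is pieces[2] ("alice@example.com") and the folder is pieces[3]
// ("Inbox").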
private static String likeEscape(String toBeEscaped, String escapeChar) {
if (toBeEscaped == null) {
return "";
}
// Use literal (non-regex) replacement: with replaceAll, the backslash escape
// character would be consumed by the regex replacement-string processing.
return toBeEscaped
.replace("%", escapeChar + "%")
.replace("_", escapeChar + "_");
}
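// Sketch of the escaping above (assuming an ESCAPE_CHAR of "\"):
// likeEscape("100%_done", "\\") -> "100\%\_done", which can then be embedded
// safely in a LIKE pattern declared with ESCAPE '\'.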
private SearchResultsDTO fetchEmailMessageDTOs(SearchParams<EmailSearchParams> searchParams) throws NoCurrentCaseException, TskCoreException, SQLException {
// get current page of email message results
SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
Blackboard blackboard = skCase.getBlackboard();
boolean unknownPath = StringUtils.isBlank(searchParams.getParamData().getAccount());
String query = " art.artifact_id AS artifact_id \n"
+ "FROM blackboard_artifacts art \n"
+ "LEFT JOIN blackboard_attributes attr ON attr.artifact_id = art.artifact_id \n"
+ " AND attr.attribute_type_id = " + BlackboardAttribute.Type.TSK_PATH.getTypeID() + " \n"
+ "WHERE art.artifact_type_id = " + BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeID() + " \n"
+ (unknownPath
? "AND (attr.value_text IS NULL OR attr.value_text NOT LIKE '%/%/%/%' ESCAPE '" + ESCAPE_CHAR + "' OR attr.value_text LIKE ?) \n"
: "AND attr.value_text LIKE ? ESCAPE '" + ESCAPE_CHAR + "' \n")
+ (searchParams.getParamData().getDataSourceId() == null ? "" : "AND art.data_source_obj_id = ? \n")
+ "GROUP BY art.artifact_id \n"
+ "ORDER BY art.artifact_id";
List<Long> allMatchingIds = new ArrayList<>();
// TODO load paged artifacts;
// this could potentially be done in a query obtaining the artifacts and another retrieving the total count.
try (CaseDbPreparedStatement preparedStatement = getCase().getCaseDbAccessManager().prepareSelect(query)) {
int paramIdx = 0;
preparedStatement.setString(++paramIdx, MessageFormat.format("%/%/{0}/{1}%",
likeEscape(searchParams.getParamData().getAccount(), ESCAPE_CHAR),
likeEscape(searchParams.getParamData().getFolder(), ESCAPE_CHAR)
));
if (searchParams.getParamData().getDataSourceId() != null) {
preparedStatement.setLong(++paramIdx, searchParams.getParamData().getDataSourceId());
}
getCase().getCaseDbAccessManager().select(preparedStatement, (resultSet) -> {
try {
while (resultSet.next()) {
allMatchingIds.add(resultSet.getLong("artifact_id"));
}
} catch (SQLException ex) {
logger.log(Level.WARNING, "There was an error fetching emails for ");
}
});
}
Stream<Long> pagedIdStream = allMatchingIds.stream()
.skip(searchParams.getStartItem());
if (searchParams.getMaxResultsCount() != null && searchParams.getMaxResultsCount() > 0) {
pagedIdStream = pagedIdStream.limit(searchParams.getMaxResultsCount());
}
List<Long> pagedIds = pagedIdStream.collect(Collectors.toList());
List<BlackboardArtifact> allArtifacts = Collections.emptyList();
if (!pagedIds.isEmpty()) {
String whereClause = "artifacts.artifact_id IN (" + pagedIds.stream().map(l -> Long.toString(l)).collect(Collectors.joining(", ")) + ")";
allArtifacts = getDataArtifactsAsBBA(blackboard, whereClause);
// Populate the attributes for paged artifacts in the list. This is done using one database call as an efficient way to
// load many artifacts/attributes at once.
blackboard.loadBlackboardAttributes(allArtifacts);
}
DataArtifactDAO dataArtDAO = MainDAO.getInstance().getDataArtifactsDAO();
BlackboardArtifactDAO.TableData tableData = dataArtDAO.createTableData(BlackboardArtifact.Type.TSK_EMAIL_MSG, allArtifacts);
return new DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type.TSK_EMAIL_MSG, tableData.columnKeys,
tableData.rows, searchParams.getStartItem(), allMatchingIds.size());
}
@SuppressWarnings("unchecked")
private List<BlackboardArtifact> getDataArtifactsAsBBA(Blackboard blackboard, String whereClause) throws TskCoreException {
return (List<BlackboardArtifact>) (List<? extends BlackboardArtifact>) blackboard.getDataArtifactsWhere(whereClause);
}
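/**
* Returns the display name for an email account, appending the account's
* folders (sorted, comma-separated) when any are provided.
*
* @param account The email account; blank maps to the default display name.
* @param folders The folders within the account; may be null or empty.
*
* @return The display name.
*/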
@Messages("EmailsDAO_getAccountDisplayName_defaultName=Default")
public String getAccountDisplayName(String account, Set<String> folders) {
String accountName = StringUtils.isBlank(account) ? Bundle.EmailsDAO_getAccountDisplayName_defaultName() : account;
if (CollectionUtils.isEmpty(folders)) {
return accountName;
} else {
String folderDisplay = folders.stream()
.map(f -> StringUtils.isBlank(f) ? Bundle.EmailsDAO_getFolderDisplayName_defaultName() : f)
.sorted((a, b) -> a.compareToIgnoreCase(b))
.collect(Collectors.joining(", "));
return MessageFormat.format("{0} ([{1}])", accountName, folderDisplay);
}
}
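// Sketch of the resulting format (editor's note, hypothetical values):
// getAccountDisplayName("alice@example.com", {"Inbox", "Sent"})
// -> "alice@example.com ([Inbox, Sent])"
// getAccountDisplayName("", {}) -> "Default"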
@Messages({"EmailsDAO_getFolderDisplayName_defaultName=Default"})
public String getFolderDisplayName(String folder) {
return StringUtils.isBlank(folder) ? Bundle.EmailsDAO_getFolderDisplayName_defaultName() : folder;
}
public TreeItemDTO<EmailSearchParams> createEmailTreeItem(String account, String folder, String displayName,
Long dataSourceId, TreeDisplayCount count) {
return new TreeItemDTO<>(
EmailSearchParams.getTypeId(),
new EmailSearchParams(dataSourceId, account, folder),
Stream.of(account, folder)
.map(s -> StringUtils.isBlank(s) ? "" : s)
.collect(Collectors.joining(PATH_DELIMITER)),
displayName,
count
);
}
/**
* Returns sql to query for email counts.
*
* @param dbType The db type (postgres/sqlite).
* @param account The account string to filter on. Null if no filter.
* Empty if account is default.
* @param dataSourceId The data source id to filter on or null for no
* filter. If non-null, a prepared statement parameter
* will need to be provided at index 2.
*
* @return A pair of the sql and the account LIKE string (or null if no
* account filter).
*/
private static Pair<String, String> getAccountFolderSql(TskData.DbType dbType, String account, Long dataSourceId) {
// possible 'and' clauses depending on whether there is an account to filter on and a data source object id to filter on.
String accountClause = "";
if (account != null) {
if (StringUtils.isBlank(account)) {
accountClause = " AND (attr.value_text IS NULL OR attr.value_text NOT LIKE '%/%/%/%' OR attr.value_text LIKE ?)\n";
} else {
accountClause = " AND attr.value_text LIKE ? ESCAPE '" + ESCAPE_CHAR + "'\n";
}
}
String dataSourceClause = (dataSourceId == null ? "" : " AND art.data_source_obj_id = ?\n");
// get path attribute value for emails
String innerQuery = "SELECT\n"
+ " MIN(attr.value_text) AS path, \n"
+ " art.artifact_id\n"
+ " FROM blackboard_artifacts art\n"
+ " LEFT JOIN blackboard_attributes attr ON attr.artifact_id = art.artifact_id\n"
+ " AND attr.attribute_type_id = " + BlackboardAttribute.Type.TSK_PATH.getTypeID() + "\n" // may change due to JIRA-8220
+ " WHERE\n"
+ " art.artifact_type_id = " + BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeID() + "\n"
+ accountClause
+ dataSourceClause
+ " GROUP BY art.artifact_id\n";
// get index 2 (account) and index 3 (folder) after splitting on delimiter
String accountFolderQuery;
switch (dbType) {
case POSTGRESQL:
accountFolderQuery = "SELECT\n"
+ (account != null ? "" : " SPLIT_PART(a.path, '" + PATH_DELIMITER + "', 3) AS account,\n")
+ " SPLIT_PART(a.path, '" + PATH_DELIMITER + "', 4) AS folder\n"
+ "FROM (\n"
+ innerQuery
+ "\n) a";
break;
case SQLITE:
accountFolderQuery = "SELECT\n"
+ (account != null ? "" : " a.account AS account,\n")
+ " (CASE \n"
+ " WHEN INSTR(a.remaining, '" + PATH_DELIMITER + "') > 0 THEN SUBSTR(a.remaining, 1, INSTR(a.remaining, '" + PATH_DELIMITER + "') - 1) \n"
+ " ELSE a.remaining\n"
+ " END) AS folder\n"
+ "FROM (\n"
+ " SELECT \n"
+ " SUBSTR(l.ltrimmed, 1, INSTR(l.ltrimmed, '" + PATH_DELIMITER + "') - 1) AS account,\n"
+ " SUBSTR(l.ltrimmed, INSTR(l.ltrimmed, '" + PATH_DELIMITER + "') + 1) AS remaining\n"
+ " FROM (\n"
+ " SELECT SUBSTR(email_paths.path, INSTR(SUBSTR(email_paths.path, 2), '" + PATH_DELIMITER + "') + 2) AS ltrimmed\n"
+ " FROM (\n"
+ innerQuery
+ " ) email_paths\n"
+ " ) l\n"
+ ") a\n";
break;
default:
throw new IllegalArgumentException("Unknown db type: " + dbType);
}
// group and get counts
String sql = " COUNT(*) AS count,\n "
+ (account != null ? "" : " account_folder.account,\n")
+ " account_folder.folder\n"
+ "FROM (\n"
+ accountFolderQuery
+ "\n) AS account_folder\n"
+ "GROUP BY \n"
+ (account != null ? "" : " account_folder.account,\n")
+ " account_folder.folder";
String accountLikeStr = (account == null)
? null
: "%/%/" + likeEscape(account, ESCAPE_CHAR) + "/%";
return Pair.of(sql, accountLikeStr);
}
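// Editor's note: with account "alice@example.com", the LIKE parameter
// produced here is "%/%/alice@example.com/%", matching the account segment
// of TSK_PATH values of the form /root/account/folder/...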
/**
* Returns the email accounts and their counts if no account is given, or the
* folders within the given account and their counts otherwise, optionally
* restricted to a single data source.
*
* @param dataSourceId The data source id or null for no data source filter.
* @param account      The account to filter on; if null, counts are returned
*                     at the account level.
*
* @return The results.
*
* @throws ExecutionException
*/
public TreeResultsDTO<EmailSearchParams> getEmailCounts(Long dataSourceId, String account) throws ExecutionException {
// track indeterminate types by key (account if account is null, account folders if account parameter is non-null)
Set<String> indeterminateTypes = this.emailCounts.getEnqueued().stream()
.filter(evt -> (dataSourceId == null || Objects.equals(evt.getDataSourceId(), dataSourceId))
&& (account == null || account.equals(evt.getAccount())))
.map(evt -> account == null ? evt.getAccount() : evt.getFolder())
.collect(Collectors.toSet());
String query = null;
try {
SleuthkitCase skCase = getCase();
Pair<String, String> sqlAndLike = getAccountFolderSql(skCase.getDatabaseType(), account, dataSourceId);
query = sqlAndLike.getLeft();
String accountLikeStr = sqlAndLike.getRight();
try (CaseDbPreparedStatement preparedStatement = skCase.getCaseDbAccessManager().prepareSelect(query)) {
int paramIdx = 0;
if (account != null) {
preparedStatement.setString(++paramIdx, accountLikeStr);
}
if (dataSourceId != null) {
preparedStatement.setLong(++paramIdx, dataSourceId);
}
// query for data
List<EmailCountsData> accumulatedData = new ArrayList<>();
skCase.getCaseDbAccessManager().select(preparedStatement, (resultSet) -> {
accumulatedData.addAll(processCountsResultSet(resultSet, account));
});
// create tree data from that
List<TreeResultsDTO.TreeItemDTO<EmailSearchParams>> emailParams = accumulatedData.stream()
.map(entry -> {
TreeDisplayCount treeDisplayCount = indeterminateTypes.contains(entry.getKey())
? TreeDisplayCount.INDETERMINATE
: TreeResultsDTO.TreeDisplayCount.getDeterminate(entry.getCount());
return createEmailTreeItem(entry.getAccount(), entry.getFolder(), entry.getDisplayName(), dataSourceId, treeDisplayCount);
})
.sorted((a,b) -> {
boolean keyADown = StringUtils.isBlank((account == null ? a.getSearchParams().getAccount() : a.getSearchParams().getFolder()));
boolean keyBDown = StringUtils.isBlank((account == null ? b.getSearchParams().getAccount() : b.getSearchParams().getFolder()));
if (keyADown != keyBDown) {
return Boolean.compare(keyADown, keyBDown);
} else {
return a.getDisplayName().compareToIgnoreCase(b.getDisplayName());
}
})
.collect(Collectors.toList());
// return results
return new TreeResultsDTO<>(emailParams);
}
} catch (SQLException | NoCurrentCaseException | TskCoreException ex) {
throw new ExecutionException(
MessageFormat.format("An error occurred while fetching email counts for account: {0} and sql: \n{1}",
account == null ? "<null>" : account,
query == null ? "<null>" : query),
ex);
}
}
/**
* Processes a result querying for email counts.
*
* @param resultSet The result set.
* @param account The account for which results apply. If null, email
* counts data is returned at the account level.
*
* @return The email counts data.
*/
private List<EmailCountsData> processCountsResultSet(ResultSet resultSet, String account) {
try {
if (account == null) {
Map<String, Set<String>> accountFolders = new HashMap<>();
Map<String, Long> counts = new HashMap<>();
while (resultSet.next()) {
long count = resultSet.getLong("count");
String resultFolder = resultSet.getString("folder");
String resultAccount = resultSet.getString("account");
if (StringUtils.isBlank(resultFolder) || StringUtils.isBlank(resultAccount)) {
resultFolder = "";
resultAccount = "";
}
counts.compute(resultAccount, (k, v) -> v == null ? count : v + count);
accountFolders
.computeIfAbsent(resultAccount, (k) -> new HashSet<>())
.add(resultFolder);
}
return counts.entrySet().stream()
.map(e -> {
String thisAccount = e.getKey();
String displayName = getAccountDisplayName(e.getKey(), accountFolders.get(e.getKey()));
Long count = e.getValue();
return new EmailCountsData(thisAccount, null, thisAccount, displayName, count);
})
.collect(Collectors.toList());
} else {
Map<String, Long> counts = new HashMap<>();
while (resultSet.next()) {
long count = resultSet.getLong("count");
String resultFolder = resultSet.getString("folder");
if (StringUtils.isBlank(resultFolder) || StringUtils.isBlank(account)) {
resultFolder = "";
}
counts.compute(resultFolder, (k, v) -> v == null ? count : v + count);
}
return counts.entrySet().stream()
.map(e -> new EmailCountsData(account, e.getKey(), e.getKey(), getFolderDisplayName(e.getKey()), e.getValue()))
.collect(Collectors.toList());
}
} catch (SQLException ex) {
logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex);
return Collections.emptyList();
}
}
@Override
void clearCaches() {
this.searchParamsCache.invalidateAll();
this.handleIngestComplete();
}
@Override
Set<? extends DAOEvent> handleIngestComplete() {
return SubDAOUtils.getIngestCompleteEvents(
this.emailCounts,
(daoEvt, count) -> createEmailTreeItem(daoEvt.getAccount(), daoEvt.getFolder(), daoEvt.getFolder(), daoEvt.getDataSourceId(), count)
);
}
@Override
Set<TreeEvent> shouldRefreshTree() {
return SubDAOUtils.getRefreshEvents(
this.emailCounts,
(daoEvt, count) -> createEmailTreeItem(daoEvt.getAccount(), daoEvt.getFolder(), daoEvt.getFolder(), daoEvt.getDataSourceId(), count)
);
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
// extract the module data event for posted artifacts, if present.
ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt);
if (dataEvt == null) {
return Collections.emptySet();
}
// maps email account => folder => data source ids
Map<String, Map<String, Set<Long>>> emailMap = new HashMap<>();
for (BlackboardArtifact art : dataEvt.getArtifacts()) {
try {
if (art.getType().getTypeID() == BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeID()) {
Pair<String, String> accountFolder = getAccountAndFolder(art);
emailMap
.computeIfAbsent(accountFolder.getLeft(), (k) -> new HashMap<>())
.computeIfAbsent(accountFolder.getRight(), (k) -> new HashSet<>())
.add(art.getDataSourceObjectID());
}
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Unable to fetch email message info for: " + art.getId(), ex);
}
}
// don't do anything else if no relevant events
if (emailMap.isEmpty()) {
return Collections.emptySet();
}
SubDAOUtils.invalidateKeys(this.searchParamsCache, (searchParams) -> {
Map<String, Set<Long>> folders = emailMap.get(searchParams.getAccount());
if (folders == null) {
return false;
}
Set<Long> dsIds = folders.get(searchParams.getFolder());
if (dsIds == null) {
return false;
}
return searchParams.getDataSourceId() == null || dsIds.contains(searchParams.getDataSourceId());
});
List<EmailEvent> emailEvents = new ArrayList<>();
for (Entry<String, Map<String, Set<Long>>> accountEntry : emailMap.entrySet()) {
String acct = accountEntry.getKey();
for (Entry<String, Set<Long>> folderEntry : accountEntry.getValue().entrySet()) {
String folder = folderEntry.getKey();
for (Long dsObjId : folderEntry.getValue()) {
emailEvents.add(new EmailEvent(dsObjId, acct, folder));
}
}
}
Stream<TreeEvent> treeEvents = this.emailCounts.enqueueAll(emailEvents).stream()
.map(daoEvt -> new TreeEvent(createEmailTreeItem(daoEvt.getAccount(), daoEvt.getFolder(), daoEvt.getFolder(),
daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.INDETERMINATE), false));
return Stream.of(emailEvents.stream(), treeEvents)
.flatMap(s -> s)
.collect(Collectors.toSet());
}
/**
* Returns true if the dao event could update the data stored in the
* parameters.
*
* @param parameters The parameters.
* @param evt The event.
*
* @return True if event invalidates parameters.
*/
private boolean isEmailInvalidating(EmailSearchParams parameters, DAOEvent evt) {
if (evt instanceof EmailEvent) {
EmailEvent emailEvt = (EmailEvent) evt;
return (Objects.equals(parameters.getAccount(), emailEvt.getAccount())
&& Objects.equals(parameters.getFolder(), emailEvt.getFolder())
&& (parameters.getDataSourceId() == null || Objects.equals(parameters.getDataSourceId(), emailEvt.getDataSourceId())));
} else {
return false;
}
}
/**
* Holds data for email counts.
*/
private static final class EmailCountsData {
private final String displayName;
private final String account;
private final String folder;
private final String key;
private final Long count;
/**
* Main constructor.
*
* @param account The relevant email account.
* @param folder The relevant email folder.
* @param key The key when querying for what should be
* indeterminate folders (account if no account
* parameter; otherwise, folder).
* @param displayName The display name.
* @param count       The number of matching email messages.
*/
public EmailCountsData(String account, String folder, String key, String displayName, Long count) {
this.displayName = displayName;
this.account = account;
this.folder = folder;
this.key = key;
this.count = count;
}
public String getDisplayName() {
return displayName;
}
public String getAccount() {
return account;
}
public String getFolder() {
return folder;
}
public String getKey() {
return key;
}
public Long getCount() {
return count;
}
}
/**
* Handles fetching and paging of data for email messages.
*/
public static class EmailFetcher extends DAOFetcher<EmailSearchParams> {
/**
* Main constructor.
*
* @param params Parameters to handle fetching of data.
*/
public EmailFetcher(EmailSearchParams params) {
super(params);
}
protected EmailsDAO getDAO() {
return MainDAO.getInstance().getEmailsDAO();
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getEmailMessages(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isEmailInvalidating(this.getParameters(), evt);
}
}
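// Editor's usage sketch (hypothetical values): page through one folder's
// messages, 50 rows per page.
// EmailFetcher fetcher = new EmailFetcher(
//         new EmailSearchParams(null, "alice@example.com", "Inbox"));
// SearchResultsDTO firstPage = fetcher.getSearchResults(50, 0);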
}

View File

@ -19,14 +19,17 @@
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.autopsy.mainui.nodes.NodeSelectionInfo.ContentNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo;
/**
* Key for content object in order to retrieve data from DAO.
*/
public class FileSystemContentSearchParam implements ContentNodeSelectionInfo {
public class FileSystemContentSearchParam {
private static final String TYPE_ID = "FILE_SYSTEM_CONTENT";
// This param can change, is not used as part of the search query, and
// therefore is not included in the equals and hashCode methods.
private ChildNodeSelectionInfo childNodeSelectionInfo;
/**
* @return The type id for this search parameter.
@ -36,10 +39,6 @@ public class FileSystemContentSearchParam implements ContentNodeSelectionInfo {
}
private final Long contentObjectId;
// This param can change, is not used as part of the search query, and
// therefore is not included in the equals and hashCode methods.
private Long childContentToSelect;
public FileSystemContentSearchParam(Long contentObjectId) {
this.contentObjectId = contentObjectId;
@ -49,14 +48,12 @@ public class FileSystemContentSearchParam implements ContentNodeSelectionInfo {
return contentObjectId;
}
@Override
public void setChildIdToSelect(Long content) {
childContentToSelect = content;
public ChildNodeSelectionInfo getNodeSelectionInfo() {
return childNodeSelectionInfo;
}
@Override
public Long getChildIdToSelect() {
return childContentToSelect;
public void setNodeSelectionInfo(ChildNodeSelectionInfo info) {
childNodeSelectionInfo = info;
}
@Override

View File

@ -169,6 +169,7 @@ public class MainDAO extends AbstractDAO {
private final TagsDAO tagsDAO = TagsDAO.getInstance();
private final OsAccountsDAO osAccountsDAO = OsAccountsDAO.getInstance();
private final CommAccountsDAO commAccountsDAO = CommAccountsDAO.getInstance();
private final EmailsDAO emailsDAO = EmailsDAO.getInstance();
// NOTE: whenever adding a new sub-dao, it should be added to this list for event updates.
private final List<AbstractDAO> allDAOs = ImmutableList.of(
@ -178,7 +179,8 @@ public class MainDAO extends AbstractDAO {
fileSystemDAO,
tagsDAO,
osAccountsDAO,
commAccountsDAO);
commAccountsDAO,
emailsDAO);
/**
* Registers listeners with autopsy event publishers and starts internal
@ -241,6 +243,10 @@ public class MainDAO extends AbstractDAO {
public CommAccountsDAO getCommAccountsDAO() {
return commAccountsDAO;
}
public EmailsDAO getEmailsDAO() {
return emailsDAO;
}
public PropertyChangeManager getResultEventsManager() {
return this.resultEventsManager;

View File

@ -24,6 +24,7 @@ import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableSet;
import java.beans.PropertyChangeEvent;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -37,7 +38,6 @@ import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
@ -52,6 +52,7 @@ import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTr
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
import org.sleuthkit.autopsy.mainui.datamodel.events.DeletedContentEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeExtensionsEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeMimeEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeSizeEvent;
@ -63,6 +64,10 @@ import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM;
/**
@ -137,6 +142,29 @@ public class ViewsDAO extends AbstractDAO {
return searchParamsCache.get(searchParams, () -> fetchSizeSearchResultsDTOs(key.getSizeFilter(), key.getDataSourceId(), startItem, maxCount));
}
/**
* Returns search results for the given deleted content search params.
*
* @param params The deleted content search params.
* @param startItem The starting item to start returning at.
* @param maxCount The maximum number of items to return.
*
* @return The search results.
*
* @throws ExecutionException
* @throws IllegalArgumentException
*/
public SearchResultsDTO getDeletedContent(DeletedContentSearchParams params, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (params.getFilter() == null) {
throw new IllegalArgumentException("Must have non-null filter");
} else if (params.getDataSourceId() != null && params.getDataSourceId() <= 0) {
throw new IllegalArgumentException("Data source id must be greater than 0 or null");
}
SearchParams<Object> searchParams = new SearchParams<>(params, startItem, maxCount);
return searchParamsCache.get(searchParams, () -> fetchDeletedSearchResultsDTOs(params.getFilter(), params.getDataSourceId(), startItem, maxCount));
}
private boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, DAOEvent eventData) {
if (!(eventData instanceof FileTypeExtensionsEvent)) {
return false;
@ -167,6 +195,17 @@ public class ViewsDAO extends AbstractDAO {
&& (key.getDataSourceId() == null || sizeEvt.getDataSourceId() == null || Objects.equals(key.getDataSourceId(), sizeEvt.getDataSourceId()));
}
private boolean isDeletedContentInvalidating(DeletedContentSearchParams params, DAOEvent eventData) {
if (!(eventData instanceof DeletedContentEvent)) {
return false;
}
DeletedContentEvent deletedContentEvt = (DeletedContentEvent) eventData;
return (deletedContentEvt.getFilter() == null || deletedContentEvt.getFilter().equals(params.getFilter()))
&& (params.getDataSourceId() == null || deletedContentEvt.getDataSourceId() == null
|| Objects.equals(params.getDataSourceId(), deletedContentEvt.getDataSourceId()));
}
/**
* Returns a sql 'and' clause to filter by data source id if one is present.
*
@ -202,7 +241,7 @@ public class ViewsDAO extends AbstractDAO {
* hide known files or returns empty string otherwise.
*/
private String getHideKnownAndClause() {
return (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known <> " + TskData.FileKnown.KNOWN.getFileKnownValue() + ") ") : "");
return (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known <> " + FileKnown.KNOWN.getFileKnownValue() + ") ") : "");
}
/**
@ -210,7 +249,7 @@ public class ViewsDAO extends AbstractDAO {
* is regular.
*/
private String getRegDirTypeClause() {
return "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")";
return "(dir_type = " + TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")";
}
/**
@ -244,18 +283,33 @@ public class ViewsDAO extends AbstractDAO {
* @return The TSK_DB_FILES_TYPE_ENUM values allowed for mime type view
* items.
*/
private Set<TskData.TSK_DB_FILES_TYPE_ENUM> getMimeDbFilesTypes() {
private Set<TSK_DB_FILES_TYPE_ENUM> getMimeDbFilesTypes() {
return Stream.of(
TskData.TSK_DB_FILES_TYPE_ENUM.FS,
TskData.TSK_DB_FILES_TYPE_ENUM.CARVED,
TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED,
TskData.TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE,
TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL,
(hideSlackFilesInViewsTree() ? null : (TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)))
TSK_DB_FILES_TYPE_ENUM.FS,
TSK_DB_FILES_TYPE_ENUM.CARVED,
TSK_DB_FILES_TYPE_ENUM.DERIVED,
TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE,
TSK_DB_FILES_TYPE_ENUM.LOCAL,
(hideSlackFilesInViewsTree() ? null : (TSK_DB_FILES_TYPE_ENUM.SLACK)))
.filter(ordinal -> ordinal != null)
.collect(Collectors.toSet());
}
/**
* Returns a statement to be preceded with 'where' or 'and' that will
* filter results to the provided filter and data source id (if non null).
*
* @param filter The deleted content filter.
* @param dataSourceId The data source id or null if no data source
* filtering is to occur.
*
* @return The sql statement to be preceded with 'and' or 'where'.
*/
private String getDeletedContentWhereStatement(DeletedContentFilter filter, Long dataSourceId) {
String whereClause = getDeletedContentClause(filter) + getDataSourceAndClause(dataSourceId);
return whereClause;
}
/**
* Returns a statement to be preceded with 'where' or 'and' that will
* filter out results that should not be viewed in mime types view.
@ -301,6 +355,40 @@ public class ViewsDAO extends AbstractDAO {
: "(size >= " + filter.getMinBound() + " AND size < " + filter.getMaxBound() + ")";
}
/**
* Returns a clause to be preceded with 'where' or 'and' to filter deleted
* content.
*
* @param filter The deleted content filter.
*
* @return The clause to be preceded with 'where' or 'and'.
*/
private static String getDeletedContentClause(DeletedContentFilter filter) throws IllegalArgumentException {
switch (filter) {
case FS_DELETED_FILTER:
return "dir_flags = " + TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue() //NON-NLS
+ " AND meta_flags != " + TSK_FS_META_FLAG_ENUM.ORPHAN.getValue() //NON-NLS
+ " AND type = " + TSK_DB_FILES_TYPE_ENUM.FS.getFileType(); //NON-NLS
case ALL_DELETED_FILTER:
return " ( "
+ "( "
+ "(dir_flags = " + TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue() //NON-NLS
+ " OR " //NON-NLS
+ "meta_flags = " + TSK_FS_META_FLAG_ENUM.ORPHAN.getValue() //NON-NLS
+ ")"
+ " AND type = " + TSK_DB_FILES_TYPE_ENUM.FS.getFileType() //NON-NLS
+ " )"
+ " OR type = " + TSK_DB_FILES_TYPE_ENUM.CARVED.getFileType() //NON-NLS
+ " OR (dir_flags = " + TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue()
+ " AND type = " + TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE.getFileType() + " )"
+ " )";
default:
throw new IllegalArgumentException(MessageFormat.format("Unsupported filter type to get deleted content: {0}", filter)); //NON-NLS
}
}
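// Editor's summary of the clauses above: FS_DELETED_FILTER matches
// unallocated, non-orphan file system files; ALL_DELETED_FILTER additionally
// matches orphan FS files, carved files, and unallocated layout files.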
/**
* The filter applied to all files to remove those that should never appear
* in the file size views.
@ -309,7 +397,7 @@ public class ViewsDAO extends AbstractDAO {
*/
private String getBaseFileSizeFilter() {
// Ignore unallocated block files.
return "(type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType() + ")" + getHideKnownAndClause();
return "(type != " + TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType() + ")" + getHideKnownAndClause();
}
/**
@ -448,6 +536,77 @@ public class ViewsDAO extends AbstractDAO {
return new TreeResultsDTO<>(treeList);
}
/**
* Returns counts for deleted content categories.
*
* @param dataSourceId The data source object id or null if no data source
* filtering should occur.
*
* @return The results.
*
* @throws IllegalArgumentException
* @throws ExecutionException
*/
public TreeResultsDTO<DeletedContentSearchParams> getDeletedContentCounts(Long dataSourceId) throws IllegalArgumentException, ExecutionException {
Set<DeletedContentFilter> indeterminateFilters = new HashSet<>();
for (DAOEvent evt : this.treeCounts.getEnqueued()) {
if (evt instanceof DeletedContentEvent) {
DeletedContentEvent deletedEvt = (DeletedContentEvent) evt;
if (dataSourceId == null || deletedEvt.getDataSourceId() == null || Objects.equals(deletedEvt.getDataSourceId(), dataSourceId)) {
if (deletedEvt.getFilter() == null) {
// a null filter indicates a full refresh; all deleted content filters need refreshing.
indeterminateFilters.addAll(Arrays.asList(DeletedContentFilter.values()));
break;
} else {
indeterminateFilters.add(deletedEvt.getFilter());
}
}
}
}
String queryStr = Stream.of(DeletedContentFilter.values())
.map((filter) -> {
String clause = getDeletedContentClause(filter);
return MessageFormat.format(" (SELECT COUNT(*) FROM tsk_files WHERE {0}) AS {1}", clause, filter.name());
})
.collect(Collectors.joining(", \n"));
try {
SleuthkitCase skCase = getCase();
List<TreeItemDTO<DeletedContentSearchParams>> treeList = new ArrayList<>();
skCase.getCaseDbAccessManager().select(queryStr, (resultSet) -> {
try {
if (resultSet.next()) {
for (DeletedContentFilter filter : DeletedContentFilter.values()) {
long count = resultSet.getLong(filter.name());
TreeDisplayCount displayCount = indeterminateFilters.contains(filter)
? TreeDisplayCount.INDETERMINATE
: TreeDisplayCount.getDeterminate(count);
treeList.add(createDeletedContentTreeItem(filter, dataSourceId, displayCount));
}
}
} catch (SQLException ex) {
logger.log(Level.WARNING, "An error occurred while fetching file type counts.", ex);
}
});
return new TreeResultsDTO<>(treeList);
} catch (NoCurrentCaseException | TskCoreException ex) {
throw new ExecutionException("An error occurred while fetching file counts with query:\n" + queryStr, ex);
}
}
private static TreeItemDTO<DeletedContentSearchParams> createDeletedContentTreeItem(DeletedContentFilter filter, Long dataSourceId, TreeDisplayCount displayCount) {
return new TreeItemDTO<>(
"DELETED_CONTENT",
new DeletedContentSearchParams(filter, dataSourceId),
filter,
filter == null ? "" : filter.getDisplayName(),
displayCount);
}
/**
* Creates a size tree item.
*
@ -659,7 +818,7 @@ public class ViewsDAO extends AbstractDAO {
+ "(SELECT \n"
+ switchStatement
+ "FROM tsk_files \n"
+ (baseWhereClauses != null ? ("WHERE " + baseWhereClauses) : "") + ") res \n"
+ (StringUtils.isNotBlank(baseWhereClauses) ? ("WHERE " + baseWhereClauses) : "") + ") res \n"
+ "WHERE res.type_id >= 0 \n"
+ "GROUP BY res.type_id";
@ -694,6 +853,11 @@ public class ViewsDAO extends AbstractDAO {
return typeCounts;
}
private SearchResultsDTO fetchDeletedSearchResultsDTOs(DeletedContentFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException {
String whereStatement = getDeletedContentWhereStatement(filter, dataSourceId);
return fetchFileViewFiles(whereStatement, filter.getDisplayName(), startItem, maxResultCount);
}
private SearchResultsDTO fetchExtensionSearchResultsDTOs(FileExtSearchFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException {
String whereStatement = getFileExtensionWhereStatement(filter, dataSourceId);
return fetchFileViewFiles(whereStatement, filter.getDisplayName(), startItem, maxResultCount);
@ -743,7 +907,7 @@ public class ViewsDAO extends AbstractDAO {
file.getName(),
file.getNameExtension(),
MediaTypeUtils.getExtensionMediaType(file.getNameExtension()),
file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC),
file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.ALLOC),
file.getType(),
cellValues));
}
@ -770,6 +934,9 @@ public class ViewsDAO extends AbstractDAO {
} else if (daoEvent instanceof FileTypeSizeEvent) {
FileTypeSizeEvent sizeEvt = (FileTypeSizeEvent) daoEvent;
return createSizeTreeItem(sizeEvt.getSizeFilter(), sizeEvt.getDataSourceId(), count);
} else if (daoEvent instanceof DeletedContentEvent) {
DeletedContentEvent deletedEvt = (DeletedContentEvent) daoEvent;
return createDeletedContentTreeItem(deletedEvt.getFilter(), deletedEvt.getDataSourceId(), count);
} else {
return null;
}
@ -784,7 +951,7 @@ public class ViewsDAO extends AbstractDAO {
@Override
Set<? extends DAOEvent> handleIngestComplete() {
SubDAOUtils.invalidateKeys(this.searchParamsCache,
(searchParams) -> searchParamsMatchEvent(null, null, null, null, true, searchParams));
(searchParams) -> searchParamsMatchEvent(null, null, null, null, null, true, searchParams));
Set<? extends DAOEvent> treeEvts = SubDAOUtils.getIngestCompleteEvents(this.treeCounts,
(daoEvt, count) -> createTreeItem(daoEvt, count));
@ -811,6 +978,7 @@ public class ViewsDAO extends AbstractDAO {
Long dsId = null;
boolean dataSourceAdded = false;
Set<FileExtSearchFilter> evtExtFilters = null;
Set<DeletedContentFilter> deletedContentFilters = null;
String evtMimeType = null;
FileSizeFilter evtFileSize = null;
@ -832,6 +1000,8 @@ public class ViewsDAO extends AbstractDAO {
evtExtFilters = EXTENSION_FILTER_MAP.getOrDefault("." + af.getNameExtension(), Collections.emptySet());
}
deletedContentFilters = getMatchingDeletedContentFilters(af);
// create a mime type mapping if mime type present
if (!StringUtils.isBlank(af.getMIMEType()) && TSK_FS_NAME_TYPE_ENUM.REG.equals(af.getDirType()) && getMimeDbFilesTypes().contains(af.getType())) {
evtMimeType = af.getMIMEType();
@ -845,36 +1015,39 @@ public class ViewsDAO extends AbstractDAO {
.orElse(null);
}
if (evtExtFilters == null || evtExtFilters.isEmpty() && evtMimeType == null && evtFileSize == null) {
if (evtExtFilters == null || evtExtFilters.isEmpty() && deletedContentFilters.isEmpty() && evtMimeType == null && evtFileSize == null) {
return Collections.emptySet();
}
}
return invalidateAndReturnEvents(evtExtFilters, evtMimeType, evtFileSize, dsId, dataSourceAdded);
return invalidateAndReturnEvents(evtExtFilters, evtMimeType, evtFileSize, deletedContentFilters, dsId, dataSourceAdded);
}
/**
* Handles invalidating caches and returning events based on digest.
*
* @param evtExtFilters The file extension filters or empty set.
* @param evtMimeType The mime type or null.
* @param evtFileSize The file size filter or null.
* @param dsId The data source id or null.
* @param dataSourceAdded Whether or not this is a data source added event.
* @param evtExtFilters The file extension filters or empty set.
* @param evtMimeType The mime type or null.
* @param evtFileSize The file size filter or null.
* @param deletedContentFilters The set of affected deleted content filters.
* @param dsId The data source id or null.
* @param dataSourceAdded Whether or not this is a data source added
* event.
*
* @return The set of dao events to be fired.
*/
private Set<DAOEvent> invalidateAndReturnEvents(Set<FileExtSearchFilter> evtExtFilters, String evtMimeType,
FileSizeFilter evtFileSize, Long dsId, boolean dataSourceAdded) {
FileSizeFilter evtFileSize, Set<DeletedContentFilter> deletedContentFilters, Long dsId, boolean dataSourceAdded) {
SubDAOUtils.invalidateKeys(this.searchParamsCache,
(Predicate<Object>) (searchParams) -> searchParamsMatchEvent(evtExtFilters, evtMimeType,
evtFileSize, dsId, dataSourceAdded, searchParams));
(searchParams) -> searchParamsMatchEvent(evtExtFilters, deletedContentFilters,
evtMimeType, evtFileSize, dsId, dataSourceAdded, searchParams));
return getDAOEvents(evtExtFilters, evtMimeType, evtFileSize, dsId, dataSourceAdded);
return getDAOEvents(evtExtFilters, deletedContentFilters, evtMimeType, evtFileSize, dsId, dataSourceAdded);
}
private boolean searchParamsMatchEvent(Set<FileExtSearchFilter> evtExtFilters,
Set<DeletedContentFilter> deletedContentFilters,
String evtMimeType,
FileSizeFilter evtFileSize,
Long dsId,
@ -899,6 +1072,10 @@ public class ViewsDAO extends AbstractDAO {
return (dataSourceAdded || Objects.equals(sizeParams.getSizeFilter(), evtFileSize))
// and data source is either null or they are equal data source ids
&& (sizeParams.getDataSourceId() == null || dsId == null || Objects.equals(sizeParams.getDataSourceId(), dsId));
} else if (searchParams instanceof DeletedContentSearchParams) {
DeletedContentSearchParams deletedParams = (DeletedContentSearchParams) searchParams;
return (dataSourceAdded || (deletedContentFilters != null && deletedContentFilters.contains(deletedParams.getFilter())))
&& (deletedParams.getDataSourceId() == null || dsId == null || Objects.equals(deletedParams.getDataSourceId(), dsId));
} else {
return false;
}
@ -908,21 +1085,34 @@ public class ViewsDAO extends AbstractDAO {
* Clears relevant cache entries from cache based on digest of autopsy
* events.
*
* @param extFilters The set of affected extension filters.
* @param mimeType The affected mime type or null.
* @param sizeFilter The affected size filter or null.
* @param dsId The file object id.
* @param dataSourceAdded A data source was added.
* @param extFilters The set of affected extension filters.
* @param deletedContentFilters The set of affected deleted content filters.
* @param mimeType The affected mime type or null.
* @param sizeFilter The affected size filter or null.
* @param dsId The file object id.
* @param dataSourceAdded A data source was added.
*
* @return The list of affected dao events.
*/
private Set<DAOEvent> getDAOEvents(Set<FileExtSearchFilter> extFilters, String mimeType, FileSizeFilter sizeFilter, Long dsId, boolean dataSourceAdded) {
private Set<DAOEvent> getDAOEvents(Set<FileExtSearchFilter> extFilters,
Set<DeletedContentFilter> deletedContentFilters,
String mimeType,
FileSizeFilter sizeFilter,
Long dsId,
boolean dataSourceAdded) {
List<DAOEvent> daoEvents = extFilters == null
? new ArrayList<>()
Stream<DAOEvent> extEvents = extFilters == null
? Stream.empty()
: extFilters.stream()
.map(extFilter -> new FileTypeExtensionsEvent(extFilter, dsId))
.collect(Collectors.toList());
.map(extFilter -> new FileTypeExtensionsEvent(extFilter, dsId));
Stream<DAOEvent> deletedEvents = deletedContentFilters == null
? Stream.empty()
: deletedContentFilters.stream()
.map(deletedFilter -> new DeletedContentEvent(deletedFilter, dsId));
List<DAOEvent> daoEvents = Stream.concat(extEvents, deletedEvents)
.collect(Collectors.toList());
if (mimeType != null) {
daoEvents.add(new FileTypeMimeEvent(mimeType, dsId));
@ -937,19 +1127,41 @@ public class ViewsDAO extends AbstractDAO {
.collect(Collectors.toList());
// data source added events are not necessarily fired before ingest completed/cancelled, so don't handle dataSourceAdded events with delay.
Set<DAOEvent> forceRefreshEvents = (dataSourceAdded)
? getFileViewRefreshEvents(dsId)
: Collections.emptySet();
List<TreeEvent> forceRefreshTreeEvents = forceRefreshEvents.stream()
.map(evt -> new TreeEvent(createTreeItem(evt, TreeDisplayCount.UNSPECIFIED), true))
.collect(Collectors.toList());
return Stream.of(daoEvents, treeEvents, forceRefreshEvents, forceRefreshTreeEvents)
.flatMap(lst -> lst.stream())
.collect(Collectors.toSet());
}
private Set<DeletedContentFilter> getMatchingDeletedContentFilters(AbstractFile af) {
Set<DeletedContentFilter> toRet = new HashSet<>();
TSK_DB_FILES_TYPE_ENUM type = af.getType();
if (af.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)
&& !af.isMetaFlagSet(TSK_FS_META_FLAG_ENUM.ORPHAN)
&& TSK_DB_FILES_TYPE_ENUM.FS.equals(type)) {
toRet.add(DeletedContentFilter.FS_DELETED_FILTER);
}
if ((((af.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC) || af.isMetaFlagSet(TSK_FS_META_FLAG_ENUM.ORPHAN)) && TSK_DB_FILES_TYPE_ENUM.FS.equals(type))
|| TSK_DB_FILES_TYPE_ENUM.CARVED.equals(type)
|| (af.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC) && TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE.equals(type)))) {
toRet.add(DeletedContentFilter.ALL_DELETED_FILTER);
}
return toRet;
}
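To make the flag logic above concrete, here is a minimal sketch (not part of the commit) that fakes an AbstractFile with Mockito; Mockito on the classpath is an assumption, while the enums and flag accessors are the real org.sleuthkit.datamodel types used above.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;

public class DeletedFilterSketch {

    public static void main(String[] args) {
        // An unallocated, non-orphan file system file satisfies both if-blocks
        // above, so it maps to FS_DELETED_FILTER and ALL_DELETED_FILTER.
        AbstractFile unallocFsFile = mock(AbstractFile.class);
        when(unallocFsFile.getType()).thenReturn(TSK_DB_FILES_TYPE_ENUM.FS);
        when(unallocFsFile.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)).thenReturn(true);
        when(unallocFsFile.isMetaFlagSet(TSK_FS_META_FLAG_ENUM.ORPHAN)).thenReturn(false);

        // A carved file fails the FS-specific check but matches the second
        // if-block, so it maps to ALL_DELETED_FILTER only (mocked flag
        // accessors return false by default).
        AbstractFile carvedFile = mock(AbstractFile.class);
        when(carvedFile.getType()).thenReturn(TSK_DB_FILES_TYPE_ENUM.CARVED);
    }
}

Note that FS_DELETED_FILTER is strictly narrower: any file matching the first if-block also satisfies the (UNALLOC || ORPHAN) && FS clause of the second.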
/**
* Returns events for when a full refresh is required because module content
* events will not necessarily provide events for files (i.e. data source
@ -961,6 +1173,7 @@ public class ViewsDAO extends AbstractDAO {
*/
private Set<DAOEvent> getFileViewRefreshEvents(Long dataSourceId) {
return ImmutableSet.of(
new DeletedContentEvent(null, dataSourceId),
new FileTypeSizeEvent(null, dataSourceId),
new FileTypeExtensionsEvent(null, dataSourceId)
);
@ -1052,4 +1265,34 @@ public class ViewsDAO extends AbstractDAO {
return getDAO().isFilesBySizeInvalidating(this.getParameters(), evt);
}
}
/**
* Handles fetching and paging of data for deleted content.
*/
public static class DeletedFileFetcher extends DAOFetcher<DeletedContentSearchParams> {
/**
* Main constructor.
*
* @param params Parameters to handle fetching of data.
*/
public DeletedFileFetcher(DeletedContentSearchParams params) {
super(params);
}
protected ViewsDAO getDAO() {
return MainDAO.getInstance().getViewsDAO();
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getDeletedContent(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isDeletedContentInvalidating(this.getParameters(), evt);
}
}
}
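Hypothetical usage of the new fetcher (not part of the commit; the 100-row page size is illustrative, and ViewsDAO is assumed to live in org.sleuthkit.autopsy.mainui.datamodel alongside the search params):

import java.util.concurrent.ExecutionException;

import org.sleuthkit.autopsy.mainui.datamodel.DeletedContentFilter;
import org.sleuthkit.autopsy.mainui.datamodel.DeletedContentSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO;

public class DeletedFetcherSketch {

    public static void main(String[] args) throws ExecutionException {
        // All deleted content across every data source (null data source id).
        DeletedContentSearchParams params =
                new DeletedContentSearchParams(DeletedContentFilter.ALL_DELETED_FILTER, null);

        ViewsDAO.DeletedFileFetcher fetcher = new ViewsDAO.DeletedFileFetcher(params);

        // First page of 100 rows; getDeletedContent receives offset pageIdx * pageSize.
        SearchResultsDTO firstPage = fetcher.getSearchResults(100, 0);
    }
}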

View File

@ -0,0 +1,81 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
import org.sleuthkit.autopsy.mainui.datamodel.DeletedContentFilter;
/**
* An event to signal that deleted files have been added to the given case on
* the given data source.
*/
public class DeletedContentEvent implements DAOEvent {
private final DeletedContentFilter filter;
private final Long dataSourceId;
public DeletedContentEvent(DeletedContentFilter filter, Long dataSourceId) {
this.filter = filter;
this.dataSourceId = dataSourceId;
}
public DeletedContentFilter getFilter() {
return filter;
}
public Long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 3;
hash = 41 * hash + Objects.hashCode(this.filter);
hash = 41 * hash + Objects.hashCode(this.dataSourceId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final DeletedContentEvent other = (DeletedContentEvent) obj;
if (this.filter != other.filter) {
return false;
}
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.RESULT;
}
}
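Because equals() and hashCode() cover both fields, duplicate events collapse when collected into a set, which is exactly how ViewsDAO.getDAOEvents aggregates them. A minimal sketch with illustrative values:

import java.util.HashSet;
import java.util.Set;

import org.sleuthkit.autopsy.mainui.datamodel.DeletedContentFilter;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DeletedContentEvent;

public class DeletedEventDedupSketch {

    public static void main(String[] args) {
        Set<DAOEvent> events = new HashSet<>();
        events.add(new DeletedContentEvent(DeletedContentFilter.FS_DELETED_FILTER, 1L));
        events.add(new DeletedContentEvent(DeletedContentFilter.FS_DELETED_FILTER, 1L));
        // events.size() == 1: equal filter and data source id make the second add a no-op.
    }
}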

View File

@ -0,0 +1,83 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* Event for new email messages.
*/
public class EmailEvent extends DataArtifactEvent {
private final String account;
private final String folder;
/**
* Main constructor.
*
* @param dataSourceId The data source id that the email message belongs to.
* @param account The email message account.
* @param folder The folder within that account of the email message.
*/
public EmailEvent(long dataSourceId, String account, String folder) {
super(BlackboardArtifact.Type.TSK_EMAIL_MSG, dataSourceId);
this.account = account;
this.folder = folder;
}
public String getAccount() {
return account;
}
public String getFolder() {
return folder;
}
@Override
public int hashCode() {
int hash = 7;
hash = 89 * hash + Objects.hashCode(this.account);
hash = 89 * hash + Objects.hashCode(this.folder);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final EmailEvent other = (EmailEvent) obj;
if (!Objects.equals(this.account, other.account)) {
return false;
}
if (!Objects.equals(this.folder, other.folder)) {
return false;
}
return true;
}
}
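A minimal construction sketch (the data source id, account, and folder values are illustrative):

import org.sleuthkit.autopsy.mainui.datamodel.events.EmailEvent;

public class EmailEventSketch {

    public static void main(String[] args) {
        // Signals new messages in the "Inbox" folder of the
        // "alice@example.com" account on data source 1.
        EmailEvent evt = new EmailEvent(1L, "alice@example.com", "Inbox");
    }
}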

View File

@ -251,7 +251,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
* A factory that shows all sets in keyword hits.
*/
@Messages({
"AnalysisResultTypeFactory_adHocName=Adhoc Results"
"AnalysisResultTypeFactory_adHocName=Ad Hoc Results"
})
static class KeywordSetFactory extends TreeSetFactory {

View File

@ -1,4 +1,4 @@
AnalysisResultTypeFactory_adHocName=Adhoc Results
AnalysisResultTypeFactory_adHocName=Ad Hoc Results
DataArtifactTypeFactory_AccountTypeParentNode_displayName=Communication Accounts
FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files
FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content
@ -8,4 +8,9 @@ SearchResultRootNode_createSheet_childCount_name=Child Count
SearchResultRootNode_createSheet_type_displayName=Name
SearchResultRootNode_createSheet_type_name=Name
SearchResultRootNode_noDesc=No Description
ViewsTypeFactory_DeletedParentNode_displayName=Deleted Files
ViewsTypeFactory_ExtensionParentNode_displayName=By Extension
ViewsTypeFactory_FileTypesParentNode_displayName=File Types
ViewsTypeFactory_MimeParentNode_displayName=By MIME Type
ViewsTypeFactory_SizeParentNode_displayName=File Size
VolumnNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files

View File

@ -19,31 +19,54 @@
package org.sleuthkit.autopsy.mainui.nodes;
import org.openide.nodes.Node;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
/**
* An interface for nodes that support viewing a selected file/directory.
*/
public interface NodeSelectionInfo {
public interface ChildNodeSelectionInfo {
/**
* Determine of the given node represents the child content to
* be selected.
*
* Determine if the given node represents the child content to be selected.
*
* @param node
*
* @return True if there is a match.
*
* @return True if there is a match.
*/
boolean matches(Node node);
public interface ContentNodeSelectionInfo extends NodeSelectionInfo{
void setChildIdToSelect(Long contentId);
Long getChildIdToSelect();
default boolean matches(Node node) {
public class ContentNodeSelectionInfo implements ChildNodeSelectionInfo {
private final Long contentId;
public ContentNodeSelectionInfo(Long contentId) {
this.contentId = contentId;
}
@Override
public boolean matches(Node node) {
Content content = node.getLookup().lookup(Content.class);
if (content != null && getChildIdToSelect() != null) {
return getChildIdToSelect().equals(content.getId());
if (content != null && contentId != null) {
return contentId.equals(content.getId());
}
return false;
}
}
public class BlackboardArtifactNodeSelectionInfo implements ChildNodeSelectionInfo {
private final long objId;
public BlackboardArtifactNodeSelectionInfo(BlackboardArtifact artifact) {
this.objId = artifact.getId();
}
@Override
public boolean matches(Node node) {
BlackboardArtifact nodeArtifact = node.getLookup().lookup(BlackboardArtifact.class);
if (nodeArtifact != null) {
return objId == nodeArtifact.getId();
}
return false;

View File

@ -28,6 +28,8 @@ import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.autopsy.datamodel.utils.IconsUtil;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.EmailSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.EmailsDAO;
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup;
@ -39,7 +41,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
* Factory for displaying data artifact types in the tree.
*/
public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearchParam> {
private final Long dataSourceId;
/**
@ -51,30 +53,31 @@ public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearch
this.dataSourceId = dataSourceId;
}
@Override
protected TreeResultsDTO<? extends DataArtifactSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactCounts(dataSourceId);
}
@Override
protected TreeNode<DataArtifactSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> rowData) {
if (rowData.getSearchParams().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) {
return new AccountTypeParentNode(rowData, this.dataSourceId);
return new AccountTypeParentNode(rowData);
} else if (rowData.getSearchParams().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeID()) {
return new EmailTypeParentNode(rowData);
} else {
return new DataArtifactTypeTreeNode(rowData);
}
}
@Override
protected TreeItemDTO<DataArtifactSearchParam> getOrCreateRelevantChild(TreeEvent treeEvt) {
TreeItemDTO<DataArtifactSearchParam> originalTreeItem = super.getTypedTreeItem(treeEvt, DataArtifactSearchParam.class);
if (originalTreeItem != null
&& !DataArtifactDAO.getIgnoredTreeTypes().contains(originalTreeItem.getSearchParams().getArtifactType())
&& (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) {
DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams();
return new TreeItemDTO<>(
DataArtifactSearchParam.getTypeId(),
@ -85,13 +88,13 @@ public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearch
}
return null;
}
@Override
public int compare(TreeItemDTO<? extends DataArtifactSearchParam> o1, TreeItemDTO<? extends DataArtifactSearchParam> o2) {
DataArtifactDAO dao = MainDAO.getInstance().getDataArtifactsDAO();
return dao.getDisplayName(o1.getSearchParams().getArtifactType()).compareToIgnoreCase(dao.getDisplayName(o2.getSearchParams().getArtifactType()));
}
private static String getIconPath(BlackboardArtifact.Type artType) {
String iconPath = IconsUtil.getIconFilePath(artType.getTypeID());
return iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath;
@ -101,19 +104,35 @@ public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearch
* Display name and count of a data artifact type in the tree.
*/
public static class DataArtifactTypeTreeNode extends TreeNode<DataArtifactSearchParam> {
public DataArtifactTypeTreeNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData) {
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
itemData);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
this.getItemData().getSearchParams().setNodeSelectionInfo(getNodeSelectionInfo());
dataResultPanel.displayDataArtifact(this.getItemData().getSearchParams());
}
}
/**
* Display name and count of email messages in the tree.
*/
public static class EmailTypeParentNode extends TreeNode<DataArtifactSearchParam> {
public EmailTypeParentNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData) {
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
itemData,
Children.create(new EmailAccountTypeFactory(itemData.getSearchParams().getDataSourceId()), true),
getDefaultLookup(itemData)
);
}
}
/**
* The account node that has nested children of account types.
*/
@ -146,27 +165,27 @@ public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearch
*/
public AccountTypeParentNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData, Long dataSourceId) {
public AccountTypeParentNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData) {
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
createTitledData(itemData),
Children.create(new AccountTypeFactory(dataSourceId), true),
Children.create(new AccountTypeFactory(itemData.getSearchParams().getDataSourceId()), true),
getDefaultLookup(itemData)
);
}
@Override
protected void updateDisplayName(TreeItemDTO<? extends DataArtifactSearchParam> prevData, TreeItemDTO<? extends DataArtifactSearchParam> curData) {
super.updateDisplayName(prevData, createTitledData(curData));
}
}
/**
* Factory for displaying account types.
*/
static class AccountTypeFactory extends TreeChildFactory<CommAccountsSearchParams> {
private final Long dataSourceId;
/**
@ -179,32 +198,32 @@ public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearch
public AccountTypeFactory(Long dataSourceId) {
this.dataSourceId = dataSourceId;
}
@Override
protected TreeResultsDTO<? extends CommAccountsSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
return MainDAO.getInstance().getCommAccountsDAO().getAccountsCounts(this.dataSourceId);
}
@Override
protected TreeNode<CommAccountsSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends CommAccountsSearchParams> rowData) {
return new AccountTypeNode(rowData);
}
@Override
protected TreeItemDTO<? extends CommAccountsSearchParams> getOrCreateRelevantChild(TreeEvent treeEvt) {
TreeItemDTO<CommAccountsSearchParams> originalTreeItem = getTypedTreeItem(treeEvt, CommAccountsSearchParams.class);
if (originalTreeItem != null
&& (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) {
CommAccountsSearchParams searchParam = originalTreeItem.getSearchParams();
return TreeChildFactory.createTreeItemDTO(originalTreeItem,
new CommAccountsSearchParams(searchParam.getType(), this.dataSourceId));
}
return null;
}
@Override
public int compare(TreeItemDTO<? extends CommAccountsSearchParams> o1, TreeItemDTO<? extends CommAccountsSearchParams> o2) {
return o1.getSearchParams().getType().getDisplayName().compareToIgnoreCase(o2.getSearchParams().getType().getDisplayName());
@ -226,10 +245,186 @@ public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearch
Accounts.getIconFilePath(itemData.getSearchParams().getType()),
itemData);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayAccounts(super.getItemData().getSearchParams());
}
}
/**
* Factory for displaying email accounts in the tree.
*/
static class EmailAccountTypeFactory extends TreeChildFactory<EmailSearchParams> {
private final Long dataSourceId;
/**
* Main constructor.
*
* @param dataSourceId The data source object id for which the results
* should be filtered or null if no data source
* filtering.
*/
public EmailAccountTypeFactory(Long dataSourceId) {
this.dataSourceId = dataSourceId;
}
private EmailsDAO getDAO() {
return MainDAO.getInstance().getEmailsDAO();
}
@Override
protected TreeResultsDTO<? extends EmailSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
return getDAO().getEmailCounts(dataSourceId, null);
}
@Override
protected TreeNode<EmailSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends EmailSearchParams> rowData) {
return new EmailAccountTypeNode(rowData);
}
@Override
protected TreeItemDTO<? extends EmailSearchParams> getOrCreateRelevantChild(TreeEvent treeEvt) {
TreeItemDTO<EmailSearchParams> originalTreeItem = getTypedTreeItem(treeEvt, EmailSearchParams.class);
if (originalTreeItem != null
&& (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) {
EmailSearchParams originalSearchParam = originalTreeItem.getSearchParams();
return getDAO().createEmailTreeItem(
originalSearchParam.getAccount(),
null,
getDAO().getAccountDisplayName(originalSearchParam.getAccount(), null),
dataSourceId,
originalTreeItem.getDisplayCount());
}
return null;
}
@Override
public int compare(TreeItemDTO<? extends EmailSearchParams> o1, TreeItemDTO<? extends EmailSearchParams> o2) {
boolean firstDown = o1.getSearchParams().getAccount() == null;
boolean secondDown = o2.getSearchParams().getAccount() == null;
if (firstDown == secondDown) {
return o1.getDisplayName().compareToIgnoreCase(o2.getDisplayName());
} else {
return Boolean.compare(firstDown, secondDown);
}
}
}
/**
* A node representing a single email account in the tree.
*/
static class EmailAccountTypeNode extends TreeNode<EmailSearchParams> {
/**
* Main constructor.
*
* @param itemData The data to display.
*/
public EmailAccountTypeNode(TreeResultsDTO.TreeItemDTO<? extends EmailSearchParams> itemData) {
super(itemData.getSearchParams().getAccount(),
"org/sleuthkit/autopsy/images/account-icon-16.png",
itemData,
Children.create(new EmailFolderTypeFactory(itemData.getSearchParams().getAccount(), itemData.getSearchParams().getDataSourceId()), true),
getDefaultLookup(itemData)
);
}
}
/**
* Factory for displaying the folders of an email account.
*/
static class EmailFolderTypeFactory extends TreeChildFactory<EmailSearchParams> {
private final String account;
private final Long dataSourceId;
/**
* Main constructor.
*
* @param account The email account for the factory.
* @param dataSourceId The data source object id for which the results
* should be filtered or null if no data source
* filtering.
*/
public EmailFolderTypeFactory(String account, Long dataSourceId) {
this.dataSourceId = dataSourceId;
this.account = account;
}
private EmailsDAO getDAO() {
return MainDAO.getInstance().getEmailsDAO();
}
@Override
protected TreeResultsDTO<? extends EmailSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
return getDAO().getEmailCounts(dataSourceId, account);
}
@Override
protected TreeNode<EmailSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends EmailSearchParams> rowData) {
return new EmailFolderTypeNode(rowData);
}
@Override
protected TreeItemDTO<? extends EmailSearchParams> getOrCreateRelevantChild(TreeEvent treeEvt) {
TreeItemDTO<EmailSearchParams> originalTreeItem = getTypedTreeItem(treeEvt, EmailSearchParams.class);
if (originalTreeItem != null
&& Objects.equals(this.account, originalTreeItem.getSearchParams().getAccount())
&& (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) {
EmailSearchParams originalSearchParam = originalTreeItem.getSearchParams();
return getDAO().createEmailTreeItem(
originalSearchParam.getAccount(),
originalSearchParam.getFolder(),
getDAO().getFolderDisplayName(originalSearchParam.getFolder()),
dataSourceId,
originalTreeItem.getDisplayCount());
}
return null;
}
@Override
public int compare(TreeItemDTO<? extends EmailSearchParams> o1, TreeItemDTO<? extends EmailSearchParams> o2) {
boolean firstDown = o1.getSearchParams().getFolder() == null;
boolean secondDown = o2.getSearchParams().getFolder() == null;
if (firstDown == secondDown) {
// compare display names (never null) to match the account factory and avoid
// a NullPointerException when both folders are null
return o1.getDisplayName().compareToIgnoreCase(o2.getDisplayName());
} else {
return Boolean.compare(firstDown, secondDown);
}
}
}
/**
* A node representing a single email folder in the tree.
*/
static class EmailFolderTypeNode extends TreeNode<EmailSearchParams> {
/**
* Main constructor.
*
* @param itemData The data to display.
*/
public EmailFolderTypeNode(TreeResultsDTO.TreeItemDTO<? extends EmailSearchParams> itemData) {
super(itemData.getSearchParams().getFolder(),
"org/sleuthkit/autopsy/images/folder-icon-16.png",
itemData);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayEmailMessages(super.getItemData().getSearchParams());
}
}
}
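For orientation, the factories above nest to produce a three-level email subtree (account and folder names hypothetical):

    TSK_EMAIL_MSG (EmailTypeParentNode)
    ├── alice@example.com (EmailAccountTypeNode)
    │   ├── Inbox (EmailFolderTypeNode)
    │   └── Sent (EmailFolderTypeNode)
    └── Default (a null account sorts last via EmailAccountTypeFactory.compare())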

View File

@ -47,7 +47,6 @@ import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.CARVED_FILE;
import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FILE;
import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FOLDER;
import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.FOLDER;
import org.sleuthkit.autopsy.mainui.nodes.NodeSelectionInfo.ContentNodeSelectionInfo;
import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory;
@ -242,9 +241,7 @@ public class FileSystemFactory extends TreeChildFactory<FileSystemContentSearchP
*/
@NbBundle.Messages({
"FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files"})
public abstract static class FileSystemTreeNode extends TreeNode<FileSystemContentSearchParam> implements ActionContext, ContentNodeSelectionInfo {
private Long childContentToSelect;
public abstract static class FileSystemTreeNode extends TreeNode<FileSystemContentSearchParam> implements ActionContext {
protected FileSystemTreeNode(String icon, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData, Children children, Lookup lookup) {
super(ContentNodeUtil.getContentName(itemData.getSearchParams().getContentObjectId()), icon, itemData, children, lookup);
@ -267,7 +264,7 @@ public class FileSystemFactory extends TreeChildFactory<FileSystemContentSearchP
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
getItemData().getSearchParams().setChildIdToSelect(childContentToSelect);
getItemData().getSearchParams().setNodeSelectionInfo(getNodeSelectionInfo());
dataResultPanel.displayFileSystemContent(this.getItemData().getSearchParams());
}
@ -277,16 +274,6 @@ public class FileSystemFactory extends TreeChildFactory<FileSystemContentSearchP
public Action[] getActions(boolean context) {
return ActionsFactory.getActions(this);
}
@Override
public void setChildIdToSelect(Long content) {
childContentToSelect = content;
}
@Override
public Long getChildIdToSelect() {
return childContentToSelect;
}
}
static class ImageTreeNode extends FileSystemTreeNode {

View File

@ -24,19 +24,19 @@ import org.openide.nodes.Sheet;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.autopsy.mainui.nodes.NodeSelectionInfo.ContentNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo;
/**
* A node whose children will be displayed in the results view and determines
* children based on a SearchResultDTO.
*/
public class SearchResultRootNode extends AbstractNode implements ContentNodeSelectionInfo{
public class SearchResultRootNode extends AbstractNode {
private final SearchResultChildFactory factory;
// This param can change, is not used as part of the search query, and
// therefore is not included in the equals and hashcode methods.
private Long childContentToSelect;
private ChildNodeSelectionInfo childNodeSelectionInfo;
public SearchResultRootNode(SearchResultsDTO initialResults) {
this(initialResults, new SearchResultChildFactory(initialResults));
@ -50,14 +50,12 @@ public class SearchResultRootNode extends AbstractNode implements ContentNodeSel
setDisplayName(initialResults.getDisplayName());
}
@Override
public void setChildIdToSelect(Long contentId) {
childContentToSelect = contentId;
public ChildNodeSelectionInfo getNodeSelectionInfo() {
return childNodeSelectionInfo;
}
@Override
public Long getChildIdToSelect() {
return childContentToSelect;
public void setNodeSelectionInfo(ChildNodeSelectionInfo info) {
childNodeSelectionInfo = info;
}
@Messages({

View File

@ -24,11 +24,13 @@ import java.util.Objects;
import java.util.logging.Level;
import javax.swing.Action;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.util.Lookup;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.directorytree.DirectoryTreeTopComponent;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
@ -38,6 +40,8 @@ import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
public abstract class TreeNode<T> extends AbstractNode implements SelectionResponder {
private static final Logger logger = Logger.getLogger(TreeNode.class.getName());
private ChildNodeSelectionInfo childNodeSelectionInfo;
/**
* Returns the default lookup based on the item dto.
@ -140,6 +144,7 @@ public abstract class TreeNode<T> extends AbstractNode implements SelectionRespo
dataResultPanel.setNode(this);
}
@Override
public Action getPreferredAction() {
// TreeNodes are used for both the result viewer and the tree viewer. For the result viewer,
@ -152,4 +157,33 @@ public abstract class TreeNode<T> extends AbstractNode implements SelectionRespo
}
return openChildAction;
}
public ChildNodeSelectionInfo getNodeSelectionInfo() {
return childNodeSelectionInfo;
}
public void setNodeSelectionInfo(ChildNodeSelectionInfo info) {
childNodeSelectionInfo = info;
}
/**
* Tree node for displaying static content in the tree.
*/
public static class StaticTreeNode extends TreeNode<String> {
public StaticTreeNode(String nodeName, String displayName, String icon) {
this(nodeName, displayName, icon, Children.LEAF);
}
public StaticTreeNode(String nodeName, String displayName, String icon, ChildFactory<?> childFactory) {
this(nodeName, displayName, icon, Children.create(childFactory, true), null);
}
public StaticTreeNode(String nodeName, String displayName, String icon, Children children) {
this(nodeName, displayName, icon, children, null);
}
public StaticTreeNode(String nodeName, String displayName, String icon, Children children, Lookup lookup) {
super(nodeName, icon, new TreeItemDTO<String>(nodeName, nodeName, nodeName, displayName, TreeDisplayCount.NOT_SHOWN), children, lookup);
}
}
}
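A sketch of the two most commonly used StaticTreeNode overloads (node names and icon path are illustrative; DeletedContentFactory is the public factory defined in ViewsTypeFactory below):

import org.sleuthkit.autopsy.mainui.nodes.TreeNode.StaticTreeNode;
import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory;

public class StaticTreeNodeSketch {

    public static void main(String[] args) {
        // Leaf overload: fixed name, display name, and icon with no children.
        StaticTreeNode leaf = new StaticTreeNode(
                "EXAMPLE_LEAF", "Example Leaf",
                "org/sleuthkit/autopsy/images/file_types.png");

        // ChildFactory overload: children are created lazily on first expansion.
        StaticTreeNode parent = new StaticTreeNode(
                "EXAMPLE_PARENT", "Example Parent",
                "org/sleuthkit/autopsy/images/file_types.png",
                new ViewsTypeFactory.DeletedContentFactory(null));
    }
}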

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.mainui.nodes;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Objects;
@ -25,6 +26,7 @@ import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.openide.nodes.Children;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileExtDocumentFilter;
@ -33,6 +35,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileExtRootFilter;
import org.sleuthkit.autopsy.mainui.datamodel.FileExtSearchFilter;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.DeletedContentSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
@ -40,6 +43,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.nodes.TreeNode.StaticTreeNode;
/**
*
@ -49,6 +53,207 @@ public class ViewsTypeFactory {
private static final Comparator<String> STRING_COMPARATOR = Comparator.nullsFirst(Comparator.naturalOrder());
private static final String FILE_TYPES_ICON = "org/sleuthkit/autopsy/images/file_types.png";
private static final String SIZE_ICON = "org/sleuthkit/autopsy/images/file-size-16.png";
/**
* Node for file extensions parent in the tree.
*/
@Messages({"ViewsTypeFactory_ExtensionParentNode_displayName=By Extension"})
private static class ExtensionParentNode extends StaticTreeNode {
ExtensionParentNode(Long dataSourceId) {
super(
"FILE_VIEW_EXTENSIONS_PARENT",
Bundle.ViewsTypeFactory_ExtensionParentNode_displayName(),
FILE_TYPES_ICON,
new FileExtFactory(dataSourceId)
);
}
}
/**
* Parent mime types node in the tree.
*/
@Messages({"ViewsTypeFactory_MimeParentNode_displayName=By MIME Type"})
public static class MimeParentNode extends StaticTreeNode {
MimeParentNode(Long dataSourceId) {
super(
"FILE_VIEW_MIME_TYPE_PARENT",
Bundle.ViewsTypeFactory_MimeParentNode_displayName(),
FILE_TYPES_ICON,
new FileMimePrefixFactory(dataSourceId)
);
}
}
/**
* Parent of deleted content nodes in the tree.
*/
@Messages({"ViewsTypeFactory_DeletedParentNode_displayName=Deleted Files"})
private static class DeletedParentNode extends StaticTreeNode {
DeletedParentNode(Long dataSourceId) {
super(
"FILE_VIEW_DELETED_PARENT",
Bundle.ViewsTypeFactory_DeletedParentNode_displayName(),
NodeIconUtil.DELETED_FILE.getPath(),
new DeletedContentFactory(dataSourceId)
);
}
}
/**
* Parent of file size nodes in the tree.
*/
@Messages({"ViewsTypeFactory_SizeParentNode_displayName=File Size"})
private static class SizeParentNode extends StaticTreeNode {
SizeParentNode(Long dataSourceId) {
super(
"FILE_VIEW_SIZE_PARENT",
Bundle.ViewsTypeFactory_SizeParentNode_displayName(),
SIZE_ICON,
new FileSizeTypeFactory(dataSourceId)
);
}
}
/**
* 'File Types' children in the tree.
*/
public static class FileTypesChildren extends Children.Array {
FileTypesChildren(Long dataSourceId) {
super(ImmutableList.of(
new ExtensionParentNode(dataSourceId),
new MimeParentNode(dataSourceId)
));
}
}
/**
* 'File Types' parent node in the tree.
*/
@Messages({"ViewsTypeFactory_FileTypesParentNode_displayName=File Types"})
private static class FileTypesParentNode extends StaticTreeNode {
public FileTypesParentNode(Long dataSourceId) {
super(
"FILE_TYPES_PARENT",
Bundle.ViewsTypeFactory_FileTypesParentNode_displayName(),
FILE_TYPES_ICON,
new FileTypesChildren(dataSourceId)
);
}
}
/**
* Children of 'File Views' in the tree.
*/
public static class ViewsChildren extends Children.Array {
public ViewsChildren(Long dataSourceId) {
super(ImmutableList.of(
new FileTypesParentNode(dataSourceId),
new DeletedParentNode(dataSourceId),
new SizeParentNode(dataSourceId)
));
}
}
/**
* The factory for creating deleted content tree nodes.
*/
public static class DeletedContentFactory extends TreeChildFactory<DeletedContentSearchParams> {
private final Long dataSourceId;
/**
* Main constructor.
*
* @param dataSourceId The data source object id to filter on or null if
*                     no data source filtering.
*/
public DeletedContentFactory(Long dataSourceId) {
this.dataSourceId = dataSourceId;
}
@Override
protected TreeNode<DeletedContentSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends DeletedContentSearchParams> rowData) {
return new DeletedContentTypeNode(rowData);
}
@Override
protected TreeResultsDTO<? extends DeletedContentSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
return MainDAO.getInstance().getViewsDAO().getDeletedContentCounts(dataSourceId);
}
@Override
protected void handleDAOAggregateEvent(DAOAggregateEvent aggEvt) {
for (DAOEvent evt : aggEvt.getEvents()) {
if (evt instanceof TreeEvent) {
TreeResultsDTO.TreeItemDTO<DeletedContentSearchParams> treeItem = super.getTypedTreeItem((TreeEvent) evt, DeletedContentSearchParams.class);
// if the search params have a null filter, trigger a full refresh
if (treeItem != null && treeItem.getSearchParams().getFilter() == null) {
super.update();
return;
}
}
}
super.handleDAOAggregateEvent(aggEvt);
}
@Override
protected TreeResultsDTO.TreeItemDTO<? extends DeletedContentSearchParams> getOrCreateRelevantChild(TreeEvent treeEvt) {
TreeResultsDTO.TreeItemDTO<DeletedContentSearchParams> originalTreeItem = super.getTypedTreeItem(treeEvt, DeletedContentSearchParams.class);
if (originalTreeItem != null
// only create child if size filter is present (if null, update should be triggered separately)
&& originalTreeItem.getSearchParams().getFilter() != null
&& (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) {
// generate a new tree item so that the right item is created even if this is a subtree event.
DeletedContentSearchParams searchParam = originalTreeItem.getSearchParams();
return new TreeResultsDTO.TreeItemDTO<>(
DeletedContentSearchParams.getTypeId(),
new DeletedContentSearchParams(searchParam.getFilter(), this.dataSourceId),
searchParam.getFilter(),
searchParam.getFilter().getDisplayName(),
originalTreeItem.getDisplayCount());
}
return null;
}
@Override
public int compare(TreeItemDTO<? extends DeletedContentSearchParams> o1, TreeItemDTO<? extends DeletedContentSearchParams> o2) {
return Integer.compare(o1.getSearchParams().getFilter().getId(), o2.getSearchParams().getFilter().getId());
}
/**
* A tree node for deleted content matching a particular filter.
*/
static class DeletedContentTypeNode extends TreeNode<DeletedContentSearchParams> {
/**
* Main constructor.
*
* @param itemData The data for the node.
*/
DeletedContentTypeNode(TreeResultsDTO.TreeItemDTO<? extends DeletedContentSearchParams> itemData) {
super("DELETED_CONTENT_" + itemData.getSearchParams().getFilter().getName(), NodeIconUtil.DELETED_FILE.getPath(), itemData);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayDeletedContent(this.getItemData().getSearchParams());
}
}
}
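How a full refresh reaches this factory, sketched end to end from the classes above:

1. A data source is added, so ViewsDAO.getFileViewRefreshEvents(dsId) emits new DeletedContentEvent(null, dsId), which ViewsDAO also wraps in a TreeEvent with TreeDisplayCount.UNSPECIFIED.
2. handleDAOAggregateEvent above sees getSearchParams().getFilter() == null and calls super.update(), reloading the whole deleted content subtree.
3. Events carrying a concrete filter skip that branch and flow through getOrCreateRelevantChild, refreshing only the matching child node.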
/**
* The factory for creating file size tree nodes.
*/
@ -130,7 +335,7 @@ public class ViewsTypeFactory {
* @param itemData The data for the node.
*/
FileSizeTypeNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeSizeSearchParams> itemData) {
super("FILE_SIZE_" + itemData.getSearchParams().getSizeFilter().getName(), "org/sleuthkit/autopsy/images/file-size-16.png", itemData);
super("FILE_SIZE_" + itemData.getSearchParams().getSizeFilter().getName(), SIZE_ICON, itemData);
}
@Override
@ -208,7 +413,7 @@ public class ViewsTypeFactory {
public FileMimePrefixNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeMimeSearchParams> itemData) {
super(
"FILE_MIME_" + itemData.getSearchParams().getMimeType(),
"org/sleuthkit/autopsy/images/file_types.png",
FILE_TYPES_ICON,
itemData,
Children.create(new FileMimeSuffixFactory(itemData.getSearchParams().getDataSourceId(), itemData.getSearchParams().getMimeType()), true),
getDefaultLookup(itemData));

View File

@ -352,7 +352,9 @@ public final class ActionsFactory {
@Messages({
"# {0} - contentType",
"ArtifactFactory_getViewSrcContentAction_displayName=View Source {0} in Directory"
"ArtifactFactory_getViewSrcContentAction_displayName=View Source {0} in Directory",
"# {0} - contentType",
"ArtifactFactory_getViewSrcContentAction_displayName2=View Source {0}"
})
/**
* Create an action to navigate to the source content in the tree hierarchy.
@ -369,12 +371,12 @@ public final class ActionsFactory {
if (sourceContent.get() instanceof DataArtifact) {
return Optional.of(new ViewArtifactAction(
(BlackboardArtifact) sourceContent.get(),
Bundle.ArtifactFactory_getViewSrcContentAction_displayName(
Bundle.ArtifactFactory_getViewSrcContentAction_displayName2(
getContentTypeStr(sourceContent.get()))));
} else if (sourceContent.get() instanceof OsAccount) {
return Optional.of(new ViewOsAccountAction(
(OsAccount) sourceContent.get(),
Bundle.ArtifactFactory_getViewSrcContentAction_displayName(
Bundle.ArtifactFactory_getViewSrcContentAction_displayName2(
getContentTypeStr(sourceContent.get()))));
} else if (sourceContent.get() instanceof AbstractFile || (artifact.isPresent() && artifact.get() instanceof DataArtifact)) {
return Optional.of(new ViewContextAction(

View File

@ -33,7 +33,7 @@ import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.ThreadSafe;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.ingest.Snapshot;
import org.sleuthkit.autopsy.ingest.IngestJobProgressSnapshot;
import org.sleuthkit.autopsy.ingest.IngestJob;
import org.sleuthkit.autopsy.ingest.IngestManager.IngestThreadActivitySnapshot;
import org.sleuthkit.autopsy.ingest.IngestProgressSnapshotProvider;
@ -98,7 +98,7 @@ final class AutoIngestJob implements Comparable<AutoIngestJob>, IngestProgressSn
* Version 3 fields.
*/
private List<IngestThreadActivitySnapshot> ingestThreadsSnapshot;
private List<Snapshot> ingestJobsSnapshot;
private List<IngestJobProgressSnapshot> ingestJobsSnapshot;
private Map<String, Long> moduleRunTimesSnapshot;
/*
@ -409,7 +409,7 @@ final class AutoIngestJob implements Comparable<AutoIngestJob>, IngestProgressSn
*
* @param snapshot
*/
synchronized void setIngestJobsSnapshot(List<Snapshot> snapshot) {
synchronized void setIngestJobsSnapshot(List<IngestJobProgressSnapshot> snapshot) {
this.ingestJobsSnapshot = snapshot;
}
@ -643,7 +643,7 @@ final class AutoIngestJob implements Comparable<AutoIngestJob>, IngestProgressSn
}
@Override
public List<Snapshot> getIngestJobSnapshots() {
public List<IngestJobProgressSnapshot> getIngestJobSnapshots() {
return this.ingestJobsSnapshot;
}

View File

@ -37,7 +37,7 @@ import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.Stage;
import org.sleuthkit.autopsy.guiutils.DurationCellRenderer;
import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer;
import org.sleuthkit.autopsy.ingest.Snapshot;
import org.sleuthkit.autopsy.ingest.IngestJobProgressSnapshot;
/**
* A node which represents all AutoIngestJobs of a given AutoIngestJobStatus.
@ -98,7 +98,7 @@ final class AutoIngestJobsNode extends AbstractNode {
* they can be changed by events in other threads which
*/
private final Stage jobStage;
private final List<Snapshot> jobSnapshot;
private final List<IngestJobProgressSnapshot> jobSnapshot;
private final Integer jobPriority;
private final Boolean ocrFlag;

View File

@ -371,6 +371,7 @@ SolrSearchService.exceptionMessage.noCurrentSolrCore=IndexMetadata did not conta
SolrSearchService.exceptionMessage.noIndexMetadata=Unable to create IndexMetaData from case directory: {0}
# {0} - collection name
SolrSearchService.exceptionMessage.unableToDeleteCollection=Unable to delete collection {0}
SolrSearchService.indexingError=Unable to index blackboard artifact.
SolrSearchService.ServiceName=Solr Keyword Search Service
SolrSearchService.DeleteDataSource.msg=Error Deleting Solr data for data source id {0}
DropdownSingleTermSearchPanel.dataSourceCheckBox.text=Restrict search to the selected data sources:

View File

@ -56,6 +56,7 @@ import org.sleuthkit.autopsy.url.analytics.DomainCategory;
@SuppressWarnings("try")
public class DefaultDomainCategorizer implements DomainCategorizer {
private static final String COMMENT_PREFIX = "#";
private static final String CSV_DELIMITER = ",";
private static final String DOMAIN_TYPE_CSV = "default_domain_categories.csv"; //NON-NLS
private static final Logger logger = Logger.getLogger(DefaultDomainCategorizer.class.getName());
@ -76,7 +77,7 @@ public class DefaultDomainCategorizer implements DomainCategorizer {
int lineNum = 1;
while (reader.ready()) {
String line = reader.readLine();
if (!StringUtils.isBlank(line)) {
if (!StringUtils.isBlank(line) && !line.startsWith(COMMENT_PREFIX)) {
addItem(mapping, line.trim(), lineNum);
lineNum++;
}
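The practical effect of the new comment check, sketched against the bundled CSV (its new header appears below):

    # Web Mail: https://github.com/mailcheck/mailcheck/wiki/List-of-Popular-Domains   <- skipped: starts with "#"
    icq.com,Messaging                                                                 <- passed to addItem as line 1

Note that lineNum++ sits inside the if-block, so skipped comment and blank lines do not advance the line counter.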

View File

@ -1,3 +1,12 @@
#
# This file was compiled from multiple sources.
# Web Mail: https://github.com/mailcheck/mailcheck/wiki/List-of-Popular-Domains
# Disposable Mail: https://www.npmjs.com/package/disposable-email-domains
# Messaging: https://www.raymond.cc/blog/list-of-web-messengers-for-your-convenience/
#
#
#
#
icq.com,Messaging
webmessenger.yahoo.com,Messaging
talk.google.com,Messaging

Can't render this file because it is too large.