merge from new_table_load

This commit is contained in:
Greg DiCristofaro 2021-11-05 10:13:18 -04:00
commit e529562180
148 changed files with 4555 additions and 4049 deletions

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015-2019 Basis Technology Corp.
* Copyright 2015-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -56,7 +56,7 @@ public final class Blackboard implements Closeable {
@Deprecated
public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException {
try {
Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "");
Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "", null);
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
throw new BlackboardException(ex.getMessage(), ex);
}
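Throughout this commit, callers of Blackboard.postArtifact move from the
two-argument form to a three-argument overload whose trailing parameter
appears to be an optional ingest job id (null when no job applies, as in
this deprecated wrapper). A minimal sketch of the migration pattern; the
jobId variable is hypothetical:

    // Before: post with module name only.
    blackboard.postArtifact(artifact, MODULE_NAME);

    // After: pass the ingest job id when one is available, otherwise null.
    Long jobId = null; // hypothetical; real callers pass IngestJobContext.getJobId()
    blackboard.postArtifact(artifact, MODULE_NAME, jobId);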
@ -117,6 +117,7 @@ public final class Blackboard implements Closeable {
* @deprecated Do not use.
*/
@Deprecated
@Override
public void close() throws IOException {
/*
* No-op maintained for backwards compatibility. Clients should not

View File

@ -761,8 +761,7 @@ public final class CaseEventListener implements PropertyChangeListener {
BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score,
null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult();
try {
// index the artifact for keyword search
blackboard.postArtifact(newAnalysisResult, MODULE_NAME);
blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null);
break;
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS

View File

@ -87,7 +87,7 @@ public class IngestEventsListener {
private final PropertyChangeListener pcl1 = new IngestModuleEventListener();
private final PropertyChangeListener pcl2 = new IngestJobEventListener();
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10;
static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20;
@ -195,7 +195,7 @@ public class IngestEventsListener {
public synchronized static void setFlagSeenDevices(boolean value) {
flagSeenDevices = value;
}
/**
* Configure the listener to flag unique apps or not.
*
@ -204,7 +204,7 @@ public class IngestEventsListener {
public synchronized static void setFlagUniqueArtifacts(boolean value) {
flagUniqueArtifacts = value;
}
/**
* Are unique apps being flagged?
*
@ -256,10 +256,12 @@ public class IngestEventsListener {
}
/**
* Create a "previously seen" hit for a device which was previously seen
* in the central repository. NOTE: Artifacts that are too common will be skipped.
* Create a "previously seen" hit for a device which was previously seen in
* the central repository. NOTE: Artifacts that are too common will be
* skipped.
*
* @param originalArtifact the artifact to create the "previously seen" item for
* @param originalArtifact the artifact to create the "previously seen" item
* for
* @param caseDisplayNames the case names the artifact was previously seen
* in
* @param aType The correlation type.
@ -271,19 +273,19 @@ public class IngestEventsListener {
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
CorrelationAttributeInstance.Type aType, String value) {
// calculate score
Score score;
int numCases = caseDisplayNames.size();
if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
score = Score.SCORE_LIKELY_NOTABLE;
} else if (numCases > MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
score = Score.SCORE_NONE;
score = Score.SCORE_NONE;
} else {
// Don't make an Analysis Result; the artifact is too common.
return;
}
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously seen in cases " + prevCases;
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
@ -297,40 +299,42 @@ public class IngestEventsListener {
value),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
prevCases));
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevExists_text(),
score, justification);
}
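Illustrative outcomes for the thresholds above, assuming
MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10 and
MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20 as declared
earlier in this file (the case counts are made-up examples):

    // caseDisplayNames.size() ==  5 -> Score.SCORE_LIKELY_NOTABLE, result created
    // caseDisplayNames.size() == 15 -> Score.SCORE_NONE, result still created
    // caseDisplayNames.size() == 25 -> too common; returns without creating anything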
/**
* Create a "previously unseen" hit for an application which was never seen in
* the central repository.
* Create a "previously unseen" hit for an application which was never seen
* in the central repository.
*
* @param originalArtifact the artifact to create the "previously unseen" item
* for
* @param originalArtifact the artifact to create the "previously unseen"
* item for
* @param aType The correlation type.
* @param value The correlation value.
*/
static private void makeAndPostPreviouslyUnseenArtifact(BlackboardArtifact originalArtifact, CorrelationAttributeInstance.Type aType, String value) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
value));
TSK_CORRELATION_VALUE, MODULE_NAME,
value));
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, originalArtifact, attributesForNewArtifact, "",
Score.SCORE_LIKELY_NOTABLE, "This application has not been previously seen before");
}
}
/**
* Make an artifact to flag the passed in artifact.
*
* @param newArtifactType Type of artifact to create.
* @param originalArtifact Artifact in current case we want to flag
* @param attributesForNewArtifact Attributes to assign to the new artifact
* @param configuration The configuration to be specified for the new artifact hit
* @param score sleuthkit.datamodel.Score to be assigned to this artifact
* @param configuration The configuration to be specified for the
* new artifact hit
* @param score sleuthkit.datamodel.Score to be assigned
* to this artifact
* @param justification Justification string
*/
private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact, String configuration,
@ -341,14 +345,13 @@ public class IngestEventsListener {
// Create artifact if it doesn't already exist.
BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(newArtifactType.getTypeID());
if (!blackboard.artifactExists(originalArtifact, type, attributesForNewArtifact)) {
BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult(
newArtifactType, score,
BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult(
newArtifactType, score,
null, configuration, justification, attributesForNewArtifact)
.getAnalysisResult();
try {
// index the artifact for keyword search
blackboard.postArtifact(newArtifact, MODULE_NAME);
blackboard.postArtifact(newArtifact, MODULE_NAME, null);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS
}
@ -549,14 +552,14 @@ public class IngestEventsListener {
for (BlackboardArtifact bbArtifact : bbArtifacts) {
// makeCorrAttrToSave will filter out artifacts which should not be sources of CR data.
List<CorrelationAttributeInstance> convertedArtifacts = new ArrayList<>();
if (bbArtifact instanceof DataArtifact){
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact)bbArtifact));
}
if (bbArtifact instanceof DataArtifact) {
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact) bbArtifact));
}
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
try {
// Only do something with this artifact if it's unique within the job
if (recentlyAddedCeArtifacts.add(eamArtifact.toString())) {
// Get a list of instances for a given value (hash, email, etc.)
List<CorrelationAttributeInstance> previousOccurrences = new ArrayList<>();
// check if we are flagging things
@ -591,7 +594,7 @@ public class IngestEventsListener {
continue;
}
}
// flag previously seen devices and communication accounts (emails, phones, etc)
if (flagPreviousItemsEnabled && !previousOccurrences.isEmpty()
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
@ -605,12 +608,12 @@ public class IngestEventsListener {
List<String> caseDisplayNames = getCaseDisplayNames(previousOccurrences);
makeAndPostPreviousSeenArtifact(bbArtifact, caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
}
// flag previously unseen apps and domains
if (flagUniqueItemsEnabled
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) {
if (previousOccurrences.isEmpty()) {
makeAndPostPreviouslyUnseenArtifact(bbArtifact, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
}
@ -635,7 +638,7 @@ public class IngestEventsListener {
} // DATA_ADDED
}
}
/**
* Gets case display names for a list of CorrelationAttributeInstance.
*
@ -666,5 +669,5 @@ public class IngestEventsListener {
}
}
return caseNames;
}
}
}

View File

@ -87,6 +87,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
private Blackboard blackboard;
private final boolean createCorrelationProperties;
private final boolean flagUniqueArtifacts;
private IngestJobContext context;
/**
* Instantiate the Central Repository ingest module.
@ -229,6 +230,8 @@ final class CentralRepoIngestModule implements FileIngestModule {
})
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
IngestEventsListener.incrementCorrelationEngineModuleCount();
/*
@ -256,7 +259,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
}
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagUniqueArtifacts()) {
IngestEventsListener.setFlagUniqueArtifacts(flagUniqueArtifacts);
}
}
if (CentralRepository.isEnabled() == false) {
/*
@ -360,12 +363,12 @@ final class CentralRepoIngestModule implements FileIngestModule {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(abstractFile, TSK_PREVIOUSLY_NOTABLE, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult(
BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE,
BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE,
null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes)
.getAnalysisResult();
try {
// index the artifact for keyword search
blackboard.postArtifact(tifArtifact, MODULE_NAME);
blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
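This file shows the intended end state of the postArtifact migration: the
module stores the IngestJobContext it receives in startUp and forwards
context.getJobId() instead of null. A condensed sketch of the pattern,
assembled from the hunks above (it assumes getJobId() returns the id type
the new overload expects):

    private IngestJobContext context;

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        this.context = context; // keep the context so the job id is available later
        // ...
    }

    // Later, when posting an analysis result:
    blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId());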

View File

@ -64,7 +64,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter {
return Version.getVersion();
}
@Override
@Override
public boolean isFileIngestModuleFactory() {
return true;
}
@ -83,7 +83,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter {
}
throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings");
}
@Override
public boolean hasGlobalSettingsPanel() {
return true;

View File

@ -148,7 +148,7 @@ public class ContactArtifactViewer extends javax.swing.JPanel implements Artifac
@Override
public Component getComponent() {
// Slap a vertical scrollbar on the panel.
return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
}
@Override

View File

@ -149,7 +149,8 @@ class MessageArtifactWorker extends SwingWorker<MessageArtifactWorker.MesssageAr
static Optional<BlackboardArtifact> getAssociatedArtifact(final BlackboardArtifact artifact) throws TskCoreException {
BlackboardAttribute attribute = artifact.getAttribute(TSK_ASSOCIATED_TYPE);
if (attribute != null) {
return Optional.of(artifact.getSleuthkitCase().getArtifactByArtifactId(attribute.getValueLong()));
// In the context of the Message content viewer, the associated artifact will always be a data artifact.
return Optional.of(artifact.getSleuthkitCase().getBlackboard().getDataArtifactById(attribute.getValueLong()));
}
return Optional.empty();
}
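The switch from getArtifactByArtifactId to Blackboard.getDataArtifactById
narrows the lookup to data artifacts, which the new comment says is always
the case for associations in this viewer. A hedged sketch of the call; the
typed DataArtifact return is an assumption based on the method name:

    BlackboardAttribute attr = artifact.getAttribute(TSK_ASSOCIATED_TYPE);
    if (attr != null) {
        // Assumed to return a DataArtifact for the given id; safe here
        // because Message viewer associations are always data artifacts.
        DataArtifact associated = artifact.getSleuthkitCase().getBlackboard()
                .getDataArtifactById(attr.getValueLong());
        return Optional.of(associated);
    }
    return Optional.empty();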

View File

@ -75,6 +75,8 @@ import org.sleuthkit.autopsy.mainui.datamodel.HashHitSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
import org.sleuthkit.autopsy.mainui.nodes.SearchResultRootNode;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.TagsDAO.TagFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeExtFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeMimeFetcher;
@ -1289,6 +1291,26 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
ex);
}
}
/**
* Displays the results of querying the DAO with the given search
* parameters.
*
* @param tagParams The search parameters.
*/
void displayTags(TagsSearchParams tagParams) {
try {
this.searchResultManager = new SearchManager(new TagFetcher(tagParams), getPageSize());
SearchResultsDTO results = searchResultManager.getResults();
displaySearchResults(results, true);
} catch (ExecutionException | IllegalArgumentException ex) {
logger.log(Level.WARNING, MessageFormat.format(
"There was an error fetching data for Tags filter: {0} and data source id: {1}.",
tagParams.getTagName(),
tagParams.getDataSourceId() == null ? "<null>" : tagParams.getDataSourceId()),
ex);
}
}
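displayTags is the panel-level endpoint of a new selection flow in this
commit: a tags tree node implements SelectionResponder and, on selection,
hands a TagsSearchParams to the top component, which delegates here. A
sketch assembled from the pieces elsewhere in this diff (see the
ContentTagTypeNode change further down):

    // On the node side:
    @Override
    public void respondSelection(DataResultTopComponent dataResultPanel) {
        dataResultPanel.displayTags(new TagsSearchParams(tagName, FILE,
                filteringDataSourceObjId() > 0 ? filteringDataSourceObjId() : null));
    }

    // DataResultTopComponent.displayTags(tagParams) then forwards to this
    // panel, which builds a SearchManager over a TagFetcher as shown above.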
/**
* Displays current search result in the result view. This assumes that

View File

@ -48,6 +48,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.HashHitSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams;
/**
* A DataResultTopComponent object is a NetBeans top component that provides
@ -438,6 +439,16 @@ public final class DataResultTopComponent extends TopComponent implements DataRe
public void displayKeywordHits(KeywordHitSearchParam keywordParams) {
dataResultPanel.displayKeywordHits(keywordParams);
}
/**
* Displays the results of querying the DAO for tags matching the given
* search parameters.
*
* @param tagParams The search parameters.
*/
public void displayTags(TagsSearchParams tagParams) {
dataResultPanel.displayTags(tagParams);
}
@Override
public void setTitle(String title) {

View File

@ -1185,8 +1185,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
})
private void exportCSVButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_exportCSVButtonActionPerformed
Node currentRoot = this.getExplorerManager().getRootContext();
// GVDTODO disabled for Search Result node
if (currentRoot != null && (!(currentRoot instanceof SearchResultRootNode)) && currentRoot.getChildren().getNodesCount() > 0) {
if (currentRoot != null && currentRoot.getChildren().getNodesCount() > 0) {
org.sleuthkit.autopsy.directorytree.ExportCSVAction.saveNodesToCSV(java.util.Arrays.asList(currentRoot.getChildren().getNodes()), this);
} else {
MessageNotifyUtil.Message.info(Bundle.DataResultViewerTable_exportCSVButtonActionPerformed_empty());

View File

@ -1,189 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import javax.swing.Action;
import org.openide.nodes.Sheet;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.actions.ViewArtifactInTimelineAction;
import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.TskCoreException;
import static org.sleuthkit.autopsy.datamodel.Bundle.*;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
/**
* Instances of this class wrap BlackboardArtifactTag objects. In the Autopsy
* presentation of the SleuthKit data model, they are leaf nodes of a sub-tree
* organized as follows: there is a tags root node with tag name child nodes;
* tag name nodes have tag type child nodes; tag type nodes are the parents of
* either content or blackboard artifact tag nodes.
*/
public class BlackboardArtifactTagNode extends TagNode {
private static final Logger LOGGER = Logger.getLogger(BlackboardArtifactTagNode.class.getName());
private static final String ICON_PATH = "org/sleuthkit/autopsy/images/green-tag-icon-16.png"; //NON-NLS
private final BlackboardArtifactTag tag;
public BlackboardArtifactTagNode(BlackboardArtifactTag tag) {
super(createLookup(tag), tag.getContent());
String name = tag.getContent().getName(); // As a backup.
try {
name = tag.getArtifact().getShortDescription();
} catch (TskCoreException ex) {
LOGGER.log(Level.WARNING, "Failed to get short description for artifact id=" + tag.getArtifact().getId(), ex);
}
setName(name);
setDisplayName(name);
this.setIconBaseWithExtension(ICON_PATH);
this.tag = tag;
}
/**
* Create the Lookup for this node.
*
* @param tag The artifact tag that this node represents.
*
* @return The Lookup object.
*/
private static Lookup createLookup(BlackboardArtifactTag tag) {
/*
* Make an Autopsy Data Model wrapper for the artifact.
*
* NOTE: The creation of an Autopsy Data Model independent of the
* NetBeans nodes is a work in progress. At the time this comment is
* being written, this object is only being used to indicate the item
* represented by this BlackboardArtifactTagNode.
*/
Content sourceContent = tag.getContent();
BlackboardArtifact artifact = tag.getArtifact();
BlackboardArtifactItem<?> artifactItem;
if (artifact instanceof AnalysisResult) {
artifactItem = new AnalysisResultItem((AnalysisResult) artifact, sourceContent);
} else {
artifactItem = new DataArtifactItem((DataArtifact) artifact, sourceContent);
}
return Lookups.fixed(tag, artifactItem, artifact, sourceContent);
}
@Messages({"BlackboardArtifactTagNode.createSheet.userName.text=User Name"})
@Override
protected Sheet createSheet() {
Sheet propertySheet = super.createSheet();
Sheet.Set properties = propertySheet.get(Sheet.PROPERTIES);
if (properties == null) {
properties = Sheet.createPropertiesSet();
propertySheet.put(properties);
}
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.srcFile.text"),
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.srcFile.text"),
"",
getDisplayName()));
addOriginalNameProp(properties);
String contentPath;
try {
contentPath = tag.getContent().getUniquePath();
} catch (TskCoreException ex) {
Logger.getLogger(ContentTagNode.class.getName()).log(Level.SEVERE, "Failed to get path for content (id = " + tag.getContent().getId() + ")", ex); //NON-NLS
contentPath = NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.unavail.text");
}
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.srcFilePath.text"),
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.srcFilePath.text"),
"",
contentPath));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.resultType.text"),
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.resultType.text"),
"",
tag.getArtifact().getDisplayName()));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.comment.text"),
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.comment.text"),
"",
tag.getComment()));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.userName.text"),
NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.userName.text"),
"",
tag.getUserName()));
return propertySheet;
}
@NbBundle.Messages("BlackboardArtifactTagNode.viewSourceArtifact.text=View Source Result")
@Override
public Action[] getActions(boolean context) {
List<Action> actions = new ArrayList<>();
BlackboardArtifact artifact = getLookup().lookup(BlackboardArtifact.class);
//if this artifact has a time stamp add the action to view it in the timeline
try {
if (ViewArtifactInTimelineAction.hasSupportedTimeStamp(artifact)) {
actions.add(new ViewArtifactInTimelineAction(artifact));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, MessageFormat.format("Error getting attribute(s) from blackboard artifact {0}.", artifact.getArtifactID()), ex); //NON-NLS
}
actions.add(new ViewTaggedArtifactAction(Bundle.BlackboardArtifactTagNode_viewSourceArtifact_text(), artifact));
actions.add(null);
// if the artifact links to another file, add an action to go to that file
try {
AbstractFile c = findLinked(artifact);
if (c != null) {
actions.add(ViewFileInTimelineAction.createViewFileAction(c));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, MessageFormat.format("Error getting linked file from blackboard artifact {0}.", artifact.getArtifactID()), ex); //NON-NLS
}
//if this artifact has associated content, add the action to view the content in the timeline
AbstractFile file = getLookup().lookup(AbstractFile.class);
if (null != file) {
actions.add(ViewFileInTimelineAction.createViewSourceFileAction(file));
}
actions.addAll(DataModelActionsFactory.getActions(tag, true));
actions.add(null);
actions.addAll(Arrays.asList(super.getActions(context)));
return actions.toArray(new Action[0]);
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
public String getItemType() {
return getClass().getName();
}
}

View File

@ -102,18 +102,12 @@ BlackboardArtifactNode_getViewSrcContentAction_type_DataArtifact=Data Artifact
BlackboardArtifactNode_getViewSrcContentAction_type_File=File
BlackboardArtifactNode_getViewSrcContentAction_type_OSAccount=OS Account
BlackboardArtifactNode_getViewSrcContentAction_type_unknown=Item
BlackboardArtifactTagNode.createSheet.userName.text=User Name
BlackboardArtifactTagNode.viewSourceArtifact.text=View Source Result
Category.five=CAT-5: Non-pertinent
Category.four=CAT-4: Exemplar/Comparison (Internal Use Only)
Category.one=CAT-1: Child Exploitation (Illegal)
Category.three=CAT-3: CGI/Animation (Child Exploitive)
Category.two=CAT-2: Child Exploitation (Non-Illegal/Age Difficult)
Category.zero=CAT-0: Uncategorized
ContentTagNode.createSheet.artifactMD5.displayName=MD5 Hash
ContentTagNode.createSheet.artifactMD5.name=MD5 Hash
ContentTagNode.createSheet.origFileName=Original Name
ContentTagNode.createSheet.userName.text=User Name
DataArtifacts_name=Data Artifacts
DataSourcesHostsNode_name=Data Sources
DeletedContent.allDelFilter.text=All
@ -421,8 +415,6 @@ TagNameNode.bbArtTagTypeNodeKey.text=Result Tags
TagNameNode.bookmark.text=Bookmark
TagNameNode.createSheet.name.name=Name
TagNameNode.createSheet.name.displayName=Name
TagNode.propertySheet.origName=Original Name
TagNode.propertySheet.origNameDisplayName=Original Name
TagsNode.displayName.text=Tags
TagsNode.createSheet.name.name=Name
TagsNode.createSheet.name.displayName=Name

View File

@ -1,160 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import javax.swing.Action;
import org.apache.commons.lang3.StringUtils;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Instances of this class wrap ContentTag objects. In the Autopsy presentation
* of the SleuthKit data model, they are leaf nodes of a tree consisting of
* content and artifact tags, grouped first by tag type, then by tag name.
*/
class ContentTagNode extends TagNode {
private static final Logger LOGGER = Logger.getLogger(ContentTagNode.class.getName());
private static final String ICON_PATH = "org/sleuthkit/autopsy/images/blue-tag-icon-16.png"; //NON-NLS
private final ContentTag tag;
ContentTagNode(ContentTag tag) {
super(Lookups.fixed(tag, tag.getContent()), tag.getContent());
super.setName(tag.getContent().getName());
super.setDisplayName(tag.getContent().getName());
this.setIconBaseWithExtension(ICON_PATH);
this.tag = tag;
}
@Messages({
"ContentTagNode.createSheet.origFileName=Original Name",
"ContentTagNode.createSheet.artifactMD5.displayName=MD5 Hash",
"ContentTagNode.createSheet.artifactMD5.name=MD5 Hash",
"ContentTagNode.createSheet.userName.text=User Name"})
@Override
protected Sheet createSheet() {
Content content = tag.getContent();
String contentPath;
try {
contentPath = content.getUniquePath();
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to get path for content (id = " + content.getId() + ")", ex); //NON-NLS
contentPath = NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.unavail.path");
}
AbstractFile file = content instanceof AbstractFile ? (AbstractFile) content : null;
Sheet propertySheet = super.createSheet();
Sheet.Set properties = propertySheet.get(Sheet.PROPERTIES);
if (properties == null) {
properties = Sheet.createPropertiesSet();
propertySheet.put(properties);
}
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.file.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.file.displayName"),
"",
content.getName()));
addOriginalNameProp(properties);
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.filePath.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.filePath.displayName"),
"",
contentPath));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.comment.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.comment.displayName"),
"",
tag.getComment()));
properties.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileModifiedTime.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileModifiedTime.displayName"),
"",
file != null ? TimeZoneUtils.getFormattedTime(file.getMtime()) : ""));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileChangedTime.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileChangedTime.displayName"),
"",
file != null ? TimeZoneUtils.getFormattedTime(file.getCtime()) : ""));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileAccessedTime.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileAccessedTime.displayName"),
"",
file != null ? TimeZoneUtils.getFormattedTime(file.getAtime()) : ""));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileCreatedTime.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileCreatedTime.displayName"),
"",
file != null ? TimeZoneUtils.getFormattedTime(file.getCrtime()) : ""));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileSize.name"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.fileSize.displayName"),
"",
content.getSize()));
properties.put(new NodeProperty<>(
Bundle.ContentTagNode_createSheet_artifactMD5_name(),
Bundle.ContentTagNode_createSheet_artifactMD5_displayName(),
"",
file != null ? StringUtils.defaultString(file.getMd5Hash()) : ""));
properties.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.userName.text"),
NbBundle.getMessage(this.getClass(), "ContentTagNode.createSheet.userName.text"),
"",
tag.getUserName()));
return propertySheet;
}
@Override
public Action[] getActions(boolean context) {
List<Action> actions = new ArrayList<>();
AbstractFile file = getLookup().lookup(AbstractFile.class);
if (file != null) {
actions.add(ViewFileInTimelineAction.createViewFileAction(file));
}
actions.addAll(DataModelActionsFactory.getActions(tag, false));
actions.add(null);
actions.addAll(Arrays.asList(super.getActions(context)));
return actions.toArray(new Action[actions.size()]);
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
public String getItemType() {
return getClass().getName();
}
}

View File

@ -375,80 +375,6 @@ public class DataModelActionsFactory {
actionsList.addAll(ContextMenuExtensionPoint.getActions());
return actionsList;
}
public static List<Action> getActions(ContentTag contentTag, boolean isArtifactSource) {
List<Action> actionsList = new ArrayList<>();
actionsList.add(new ViewContextAction((isArtifactSource ? VIEW_SOURCE_FILE_IN_DIR : VIEW_FILE_IN_DIR), contentTag.getContent()));
final ContentTagNode tagNode = new ContentTagNode(contentTag);
actionsList.add(null); // creates a menu separator
actionsList.add(new NewWindowViewAction(VIEW_IN_NEW_WINDOW, tagNode));
final Collection<AbstractFile> selectedFilesList
= new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class));
if (selectedFilesList.size() == 1) {
actionsList.add(new ExternalViewerAction(OPEN_IN_EXTERNAL_VIEWER, tagNode));
} else {
actionsList.add(ExternalViewerShortcutAction.getInstance());
}
actionsList.add(null); // creates a menu separator
actionsList.add(ExtractAction.getInstance());
actionsList.add(ExportCSVAction.getInstance());
actionsList.add(null); // creates a menu separator
actionsList.add(AddContentTagAction.getInstance());
if (isArtifactSource) {
actionsList.add(AddBlackboardArtifactTagAction.getInstance());
}
if (selectedFilesList.size() == 1) {
actionsList.add(DeleteFileContentTagAction.getInstance());
}
if (isArtifactSource) {
final Collection<BlackboardArtifact> selectedArtifactsList
= new HashSet<>(Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class));
if (selectedArtifactsList.size() == 1) {
actionsList.add(DeleteFileBlackboardArtifactTagAction.getInstance());
}
}
actionsList.add(DeleteContentTagAction.getInstance());
actionsList.add(ReplaceContentTagAction.getInstance());
actionsList.addAll(ContextMenuExtensionPoint.getActions());
return actionsList;
}
public static List<Action> getActions(BlackboardArtifactTag artifactTag, boolean isArtifactSource) {
List<Action> actionsList = new ArrayList<>();
actionsList.add(new ViewContextAction((isArtifactSource ? VIEW_SOURCE_FILE_IN_DIR : VIEW_FILE_IN_DIR), artifactTag.getContent()));
final BlackboardArtifactTagNode tagNode = new BlackboardArtifactTagNode(artifactTag);
actionsList.add(null); // creates a menu separator
actionsList.add(new NewWindowViewAction(VIEW_IN_NEW_WINDOW, tagNode));
final Collection<AbstractFile> selectedFilesList
= new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class));
if (selectedFilesList.size() == 1) {
actionsList.add(new ExternalViewerAction(OPEN_IN_EXTERNAL_VIEWER, tagNode));
} else {
actionsList.add(ExternalViewerShortcutAction.getInstance());
}
actionsList.add(null); // creates a menu separator
actionsList.add(ExtractAction.getInstance());
actionsList.add(ExportCSVAction.getInstance());
actionsList.add(null); // creates a menu separator
actionsList.add(AddContentTagAction.getInstance());
if (isArtifactSource) {
actionsList.add(AddBlackboardArtifactTagAction.getInstance());
}
if (selectedFilesList.size() == 1) {
actionsList.add(DeleteFileContentTagAction.getInstance());
}
if (isArtifactSource) {
final Collection<BlackboardArtifact> selectedArtifactsList
= new HashSet<>(Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class));
if (selectedArtifactsList.size() == 1) {
actionsList.add(DeleteFileBlackboardArtifactTagAction.getInstance());
}
}
actionsList.add(DeleteBlackboardArtifactTagAction.getInstance());
actionsList.add(ReplaceBlackboardArtifactTagAction.getInstance());
actionsList.addAll(ContextMenuExtensionPoint.getActions());
return actionsList;
}
public static List<Action> getActions(OsAccount osAccount) {
List<Action> actionsList = new ArrayList<>();

View File

@ -141,11 +141,8 @@ public interface DisplayableItemNodeVisitor<T> {
T visit(Tags.ContentTagTypeNode node);
T visit(ContentTagNode node);
T visit(Tags.BlackboardArtifactTagTypeNode node);
T visit(BlackboardArtifactTagNode node);
/*
* Reports
@ -475,21 +472,11 @@ public interface DisplayableItemNodeVisitor<T> {
return defaultVisit(node);
}
@Override
public T visit(ContentTagNode node) {
return defaultVisit(node);
}
@Override
public T visit(Tags.BlackboardArtifactTagTypeNode node) {
return defaultVisit(node);
}
@Override
public T visit(BlackboardArtifactTagNode node) {
return defaultVisit(node);
}
@Override
public T visit(Reports.ReportsListNode node) {
return defaultVisit(node);

View File

@ -1,128 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.datamodel.utils.FileNameTransTask;
import org.sleuthkit.autopsy.texttranslation.TextTranslationService;
import org.sleuthkit.datamodel.Content;
/**
* An abstract superclass for a node that represents a tag, uses the name of a
* given Content object as its display name, and has a property sheet with an
* original name property when machine translation is enabled.
*
* The translation of the Content name is done in a background thread. The
* translated name is made the display name of the node and the untranslated
* name is put into both the original name property and into the node's tooltip.
*
* TODO (Jira-6174): Consider modifying this class to be able to use it more broadly
* within the Autopsy data model (i.e., AbstractNode subclasses). It's not really
* specific to a tag node.
*/
@NbBundle.Messages({
"TagNode.propertySheet.origName=Original Name",
"TagNode.propertySheet.origNameDisplayName=Original Name"
})
abstract class TagNode extends DisplayableItemNode {
private final static String ORIG_NAME_PROP_NAME = Bundle.TagNode_propertySheet_origName();
private final static String ORIG_NAME_PROP_DISPLAY_NAME = Bundle.TagNode_propertySheet_origNameDisplayName();
private final String originalName;
private volatile String translatedName;
/**
* An abstract superclass for a node that represents a tag, uses the name of
* a given Content object as its display name, and has a property sheet with
* an untranslated file name property when machine translation is enabled.
*
* @param lookup The Lookup of the node.
* @param content The Content to use for the node display name.
*/
TagNode(Lookup lookup, Content content) {
super(Children.LEAF, lookup);
originalName = content.getName();
}
@Override
public boolean isLeafTypeNode() {
return true;
}
@Override
abstract public String getItemType();
@Override
abstract public <T> T accept(DisplayableItemNodeVisitor<T> visitor);
/**
* Adds an original name property to the node's property sheet and submits
* an original name translation task.
*
* The translation of the original name is done in a background thread. The
* translated name is made the display name of the node and the untranslated
* name is put into both the original name property and into the node's
* tooltip.
*
* @param properties The node's property sheet.
*/
protected void addOriginalNameProp(Sheet.Set properties) {
if (TextTranslationService.getInstance().hasProvider() && UserPreferences.displayTranslatedFileNames()) {
properties.put(new NodeProperty<>(
ORIG_NAME_PROP_NAME,
ORIG_NAME_PROP_DISPLAY_NAME,
"",
translatedName != null ? originalName : ""));
if (translatedName == null) {
new FileNameTransTask(originalName, this, new NameTranslationListener()).submit();
}
}
}
/**
* A listener for PropertyChangeEvents from a background task used to
* translate the original display name associated with the node.
*/
private class NameTranslationListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(FileNameTransTask.getPropertyName())) {
translatedName = evt.getNewValue().toString();
String originalName = evt.getOldValue().toString();
setDisplayName(translatedName);
setShortDescription(originalName);
updatePropertySheet(new NodeProperty<>(
ORIG_NAME_PROP_NAME,
ORIG_NAME_PROP_DISPLAY_NAME,
"",
originalName));
}
}
}
}

View File

@ -39,8 +39,13 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams;
import static org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType.FILE;
import static org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType.RESULT;
import org.sleuthkit.autopsy.mainui.nodes.SelectionResponder;
import org.sleuthkit.autopsy.tags.TagUtils;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.ContentTag;
@ -214,7 +219,7 @@ public class Tags implements AutopsyVisitableItem {
}
}
};
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
/**
@ -225,7 +230,7 @@ public class Tags implements AutopsyVisitableItem {
TagNameNodeFactory(long objId) {
this.filteringDSObjId = objId;
}
@Override
protected void addNotify() {
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
@ -411,13 +416,13 @@ public class Tags implements AutopsyVisitableItem {
* and blackboard artifact tags, grouped first by tag type, then by tag
* name.
*/
public class ContentTagTypeNode extends DisplayableItemNode implements Observer {
public class ContentTagTypeNode extends DisplayableItemNode implements Observer, SelectionResponder {
private final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; //NON-NLS
private final TagName tagName;
public ContentTagTypeNode(TagName tagName) {
super(Children.create(new ContentTagNodeFactory(tagName), true), Lookups.singleton(tagName.getDisplayName() + " " + CONTENT_DISPLAY_NAME));
super(Children.LEAF, Lookups.singleton(tagName.getDisplayName() + " " + CONTENT_DISPLAY_NAME));
this.tagName = tagName;
super.setName(CONTENT_DISPLAY_NAME);
updateDisplayName();
@ -425,6 +430,12 @@ public class Tags implements AutopsyVisitableItem {
tagResults.addObserver(this);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayTags(new TagsSearchParams(tagName, FILE,
filteringDataSourceObjId() > 0 ? filteringDataSourceObjId() : null));
}
private void updateDisplayName() {
long tagsCount = 0;
try {
@ -479,51 +490,6 @@ public class Tags implements AutopsyVisitableItem {
}
}
private class ContentTagNodeFactory extends ChildFactory<ContentTag> implements Observer {
private final TagName tagName;
ContentTagNodeFactory(TagName tagName) {
super();
this.tagName = tagName;
tagResults.addObserver(this);
}
@Override
protected boolean createKeys(List<ContentTag> keys) {
// Use the content tags bearing the specified tag name as the keys.
try {
List<ContentTag> contentTags = (filteringDSObjId > 0)
? Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByTagName(tagName, filteringDSObjId)
: Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByTagName(tagName);
if (UserPreferences.showOnlyCurrentUserTags()) {
String userName = System.getProperty(USER_NAME_PROPERTY);
for (ContentTag tag : contentTags) {
if (userName.equals(tag.getUserName())) {
keys.add(tag);
}
}
} else {
keys.addAll(contentTags);
}
} catch (TskCoreException | NoCurrentCaseException ex) {
Logger.getLogger(ContentTagNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); //NON-NLS
}
return true;
}
@Override
protected Node createNodeForKey(ContentTag key) {
// The content tags to be wrapped are used as the keys.
return new ContentTagNode(key);
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
private final String ARTIFACT_DISPLAY_NAME = NbBundle.getMessage(BlackboardArtifactTagTypeNode.class, "BlackboardArtifactTagTypeNode.displayName.text");
/**
@ -532,13 +498,13 @@ public class Tags implements AutopsyVisitableItem {
* content and blackboard artifact tags, grouped first by tag type, then by
* tag name.
*/
public class BlackboardArtifactTagTypeNode extends DisplayableItemNode implements Observer {
public class BlackboardArtifactTagTypeNode extends DisplayableItemNode implements Observer, SelectionResponder {
private final TagName tagName;
private final String ICON_PATH = "org/sleuthkit/autopsy/images/tag-folder-blue-icon-16.png"; //NON-NLS
public BlackboardArtifactTagTypeNode(TagName tagName) {
super(Children.create(new BlackboardArtifactTagNodeFactory(tagName), true), Lookups.singleton(tagName.getDisplayName() + " " + ARTIFACT_DISPLAY_NAME));
super(Children.LEAF, Lookups.singleton(tagName.getDisplayName() + " " + ARTIFACT_DISPLAY_NAME));
this.tagName = tagName;
super.setName(ARTIFACT_DISPLAY_NAME);
this.setIconBaseWithExtension(ICON_PATH);
@ -546,6 +512,12 @@ public class Tags implements AutopsyVisitableItem {
tagResults.addObserver(this);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayTags(new TagsSearchParams(tagName, RESULT,
filteringDataSourceObjId() > 0 ? filteringDataSourceObjId() : null));
}
private void updateDisplayName() {
long tagsCount = 0;
try {
@ -598,49 +570,4 @@ public class Tags implements AutopsyVisitableItem {
return getClass().getName();
}
}
private class BlackboardArtifactTagNodeFactory extends ChildFactory<BlackboardArtifactTag> implements Observer {
private final TagName tagName;
BlackboardArtifactTagNodeFactory(TagName tagName) {
super();
this.tagName = tagName;
tagResults.addObserver(this);
}
@Override
protected boolean createKeys(List<BlackboardArtifactTag> keys) {
try {
// Use the blackboard artifact tags bearing the specified tag name as the keys.
List<BlackboardArtifactTag> artifactTags = (filteringDSObjId > 0)
? Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName, filteringDSObjId)
: Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName);
if (UserPreferences.showOnlyCurrentUserTags()) {
String userName = System.getProperty(USER_NAME_PROPERTY);
for (BlackboardArtifactTag tag : artifactTags) {
if (userName.equals(tag.getUserName())) {
keys.add(tag);
}
}
} else {
keys.addAll(artifactTags);
}
} catch (TskCoreException | NoCurrentCaseException ex) {
Logger.getLogger(BlackboardArtifactTagNodeFactory.class.getName()).log(Level.SEVERE, "Failed to get tag names", ex); //NON-NLS
}
return true;
}
@Override
protected Node createNodeForKey(BlackboardArtifactTag key) {
// The blackboard artifact tags to be wrapped are used as the keys.
return new BlackboardArtifactTagNode(key);
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -24,7 +24,6 @@ import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -188,10 +187,10 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
switch (xryKey) {
case TEL:
case NUMBER:
if(!XRYUtils.isPhoneValid(pair.getValue())) {
if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}
// Apply namespace or direction
if (xryNamespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) {
callerId = pair.getValue();
@ -206,30 +205,30 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
// Although confusing, as these are also 'name spaces', it appears
// later versions of XRY just made these standardized lines.
case TO:
if(!XRYUtils.isPhoneValid(pair.getValue())) {
if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}
calleeList.add(pair.getValue());
break;
case FROM:
if(!XRYUtils.isPhoneValid(pair.getValue())) {
if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}
callerId = pair.getValue();
break;
case TIME:
try {
//Transform value to seconds since epoch
long dateTimeSinceEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue());
startTime = dateTimeSinceEpoch;
} catch (DateTimeParseException ex) {
logger.log(Level.WARNING, String.format("[XRY DSP] Assumption"
+ " about the date time formatting of call logs is "
+ "not right. Here is the value [ %s ]", pair.getValue()), ex);
}
break;
//Transform value to seconds since epoch
long dateTimeSinceEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue());
startTime = dateTimeSinceEpoch;
} catch (DateTimeParseException ex) {
logger.log(Level.WARNING, String.format("[XRY DSP] Assumption"
+ " about the date time formatting of call logs is "
+ "not right. Here is the value [ %s ]", pair.getValue()), ex);
}
break;
case DIRECTION:
String directionString = pair.getValue().toLowerCase();
if (directionString.equals("incoming")) {
@ -263,7 +262,6 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
// Make sure we have the required fields, otherwise the CommHelper will
// complain about illegal arguments.
// These are all the invalid combinations.
if (callerId == null && calleeList.isEmpty()
|| direction == CommunicationDirection.INCOMING && callerId == null
@ -288,10 +286,10 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
// it would have been a valid combination.
if (callerId != null) {
try {
currentCase.getCommunicationsManager().createAccountFileInstance(
Account.Type.PHONE, callerId, PARSER_NAME, parent);
currentCase.getCommunicationsManager().createAccountFileInstance(
Account.Type.PHONE, callerId, PARSER_NAME, parent, null);
} catch (InvalidAccountIDException ex) {
logger.log(Level.WARNING, String.format("Invalid account identifier %s", callerId), ex);
logger.log(Level.WARNING, String.format("Invalid account identifier %s", callerId), ex);
}
otherAttributes.add(new BlackboardAttribute(
@ -301,12 +299,11 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
for (String phone : calleeList) {
try {
currentCase.getCommunicationsManager().createAccountFileInstance(
Account.Type.PHONE, phone, PARSER_NAME, parent);
currentCase.getCommunicationsManager().createAccountFileInstance(
Account.Type.PHONE, phone, PARSER_NAME, parent, null);
} catch (InvalidAccountIDException ex) {
logger.log(Level.WARNING, String.format("Invalid account identifier %s", phone), ex);
}
otherAttributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER,
@ -315,17 +312,17 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
if (!otherAttributes.isEmpty()) {
BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG), otherAttributes);
currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null);
}
} else {
// Otherwise we can safely use the helper.
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
currentCase, PARSER_NAME, parent, Account.Type.PHONE);
currentCase, PARSER_NAME, parent, Account.Type.PHONE, null);
helper.addCalllog(direction, callerId, calleeList, startTime,
endTime, callType, otherAttributes);
}
}
}
}
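The XRY parsers in this commit follow the same migration: both
createAccountFileInstance and the CommunicationArtifactsHelper constructor
gain a trailing argument, passed as null here since these parsers do not
run inside an ingest job. A condensed sketch of both call sites, assuming
the new trailing parameter is the same optional ingest job id used
elsewhere in this commit:

    // Account creation: one extra trailing argument (null = no ingest job).
    currentCase.getCommunicationsManager().createAccountFileInstance(
            Account.Type.PHONE, callerId, PARSER_NAME, parent, null);

    // Helper construction: same pattern.
    CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
            currentCase, PARSER_NAME, parent, Account.Type.PHONE, null);
    helper.addCalllog(direction, callerId, calleeList, startTime,
            endTime, callType, otherAttributes);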

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -24,7 +24,6 @@ import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import static org.sleuthkit.autopsy.datasourceprocessors.xry.AbstractSingleEntityParser.PARSER_NAME;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -136,7 +135,7 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser {
// complain about illegal arguments.
if (phoneNumber != null || homePhoneNumber != null || mobilePhoneNumber != null || hasAnEmail) {
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
currentCase, PARSER_NAME, parent, Account.Type.DEVICE);
currentCase, PARSER_NAME, parent, Account.Type.DEVICE, null);
helper.addContact(contactName, phoneNumber, homePhoneNumber,
mobilePhoneNumber, emailAddr, additionalAttributes);
@ -144,8 +143,8 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser {
// Just create an artifact with the attributes that we do have.
if (!additionalAttributes.isEmpty()) {
BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), additionalAttributes);
currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null);
}
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -95,6 +95,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
* Indicates if the display name of the XRY key is a recognized type.
*
* @param name
*
* @return
*/
public static boolean contains(String name) {
@ -114,6 +115,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
* contains() beforehand.
*
* @param name
*
* @return
*/
public static XryKey fromDisplayName(String name) {
@ -149,6 +151,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
* type.
*
* @param xryNamespace
*
* @return
*/
public static boolean contains(String xryNamespace) {
@ -169,6 +172,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
* contains() beforehand.
*
* @param xryNamespace
*
* @return
*/
public static XryNamespace fromDisplayName(String xryNamespace) {
@ -206,6 +210,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
* Indicates if the display name of the XRY key is a recognized type.
*
* @param name
*
* @return
*/
public static boolean contains(String name) {
@ -225,6 +230,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
* contains() beforehand.
*
* @param name
*
* @return
*/
public static XryMetaKey fromDisplayName(String name) {
@ -253,11 +259,13 @@ final class XRYMessagesFileParser implements XRYFileParser {
* assumption is correct, otherwise an error will appear in the logs.
*
* @param reader The XRYFileReader that reads XRY entities from the
* Message-SMS report.
* Message-SMS report.
* @param parent The parent Content to create artifacts from.
* @throws IOException If an I/O error is encountered during report reading
*
* @throws IOException If an I/O error is encountered during report
* reading
* @throws TskCoreException If an error during artifact creation is
* encountered.
* encountered.
*/
@Override
public void parse(XRYFileReader reader, Content parent, SleuthkitCase currentCase) throws IOException, TskCoreException, BlackboardException {
@ -270,10 +278,10 @@ final class XRYMessagesFileParser implements XRYFileParser {
while (reader.hasNextEntity()) {
String xryEntity = reader.nextEntity();
// This call will combine all segmented text into a single key value pair
List<XRYKeyValuePair> pairs = getXRYKeyValuePairs(xryEntity, reader, referenceNumbersSeen);
// Transform all the data from XRY land into the appropriate CommHelper
// data types.
final String messageType = PARSER_NAME;
@ -286,8 +294,8 @@ final class XRYMessagesFileParser implements XRYFileParser {
String text = null;
final String threadId = null;
final Collection<BlackboardAttribute> otherAttributes = new ArrayList<>();
for(XRYKeyValuePair pair : pairs) {
for (XRYKeyValuePair pair : pairs) {
XryNamespace namespace = XryNamespace.NONE;
if (XryNamespace.contains(pair.getNamespace())) {
namespace = XryNamespace.fromDisplayName(pair.getNamespace());
@ -298,55 +306,55 @@ final class XRYMessagesFileParser implements XRYFileParser {
switch (key) {
case TEL:
case NUMBER:
if(!XRYUtils.isPhoneValid(pair.getValue())) {
if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}
// Apply namespace or direction
if (namespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) {
senderId = pair.getValue();
} else if (namespace == XryNamespace.TO || direction == CommunicationDirection.OUTGOING) {
recipientIdsList.add(pair.getValue());
} else {
try {
currentCase.getCommunicationsManager().createAccountFileInstance(
Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent);
Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent, null);
} catch (InvalidAccountIDException ex) {
logger.log(Level.WARNING, String.format("Invalid account identifier %s", pair.getValue()), ex);
}
otherAttributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER,
PARSER_NAME, pair.getValue()));
}
break;
// Although confusing, as these are also 'name spaces', it appears
// later versions of XRY just made these standardized lines.
case FROM:
if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}
senderId = pair.getValue();
break;
case TO:
if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}
recipientIdsList.add(pair.getValue());
break;
case TIME:
try {
//Transform value to seconds since epoch
long dateTimeSinceInEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue());
dateTime = dateTimeSinceInEpoch;
} catch (DateTimeParseException ex) {
logger.log(Level.WARNING, String.format("[%s] Assumption"
+ " about the date time formatting of messages is "
+ "not right. Here is the pair [ %s ]", PARSER_NAME, pair), ex);
}
break;
case TYPE:
switch (normalizedValue) {
case "incoming":
@ -406,11 +414,11 @@ final class XRYMessagesFileParser implements XRYFileParser {
}
break;
case SERVICE_CENTER:
if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}
otherAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER,
PARSER_NAME, pair.getValue()));
break;
default:
@ -427,18 +435,18 @@ final class XRYMessagesFileParser implements XRYFileParser {
}
}
}
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
currentCase, PARSER_NAME, parent, Account.Type.PHONE);
helper.addMessage(messageType, direction, senderId, recipientIdsList,
dateTime, readStatus, subject, text, threadId, otherAttributes);
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
currentCase, PARSER_NAME, parent, Account.Type.PHONE, null);
helper.addMessage(messageType, direction, senderId, recipientIdsList,
dateTime, readStatus, subject, text, threadId, otherAttributes);
}
}
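For reference, a hedged sketch of the updated helper usage in isolation; all values are illustrative, and the final constructor argument is the new nullable ingest job ID:

    CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
            currentCase, PARSER_NAME, parent, Account.Type.PHONE, null /* no ingest job ID */);
    helper.addMessage("SMS", CommunicationDirection.OUTGOING, "5551234567",
            Collections.singletonList("5557654321"), 1609459200L /* epoch seconds */,
            MessageReadStatus.READ, "subject", "message text", null /* thread ID */,
            Collections.emptyList());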
/**
* Extracts all pairs from the XRY Entity. This function will unify any
* segmented text, if need be.
*/
private List<XRYKeyValuePair> getXRYKeyValuePairs(String xryEntity,
XRYFileReader reader, Set<Integer> referenceValues) throws IOException {
@ -508,10 +516,13 @@ final class XRYMessagesFileParser implements XRYFileParser {
* Builds up segmented message entities so that the text is unified for a
* single artifact.
*
     * @param reader               File reader that is producing XRY entities.
     * @param referenceNumbersSeen All known reference numbers up until this
     *                             point.
     * @param xryEntity            The source XRY entity.
*
* @return
*
* @throws IOException
*/
private String getSegmentedText(String[] xryEntity, XRYFileReader reader,
@ -604,7 +615,8 @@ final class XRYMessagesFileParser implements XRYFileParser {
* Extracts the value of the XRY meta key, if any.
*
* @param xryLines XRY entity to extract from.
* @param metaKey The key type to extract.
*
* @return
*/
private Optional<Integer> getMetaKeyValue(String[] xryLines, XryMetaKey metaKey) {
@ -629,10 +641,12 @@ final class XRYMessagesFileParser implements XRYFileParser {
/**
* Extracts the ith XRY Key Value pair in the XRY Entity.
*
* The total number of pairs can be determined via
* getCountOfKeyValuePairs().
*
* @param xryLines XRY entity.
* @param index The requested Key Value pair.
*
* @return
*/
private Optional<XRYKeyValuePair> getKeyValuePairByIndex(String[] xryLines, int index) {
@ -672,4 +686,4 @@ final class XRYMessagesFileParser implements XRYFileParser {
return Optional.empty();
}
}
}

View File

@ -388,10 +388,10 @@ public class ResultsSorter implements Comparator<Result> {
Bundle.FileSorter_SortingMethod_keywordlist_displayName()), // Sort alphabetically by list of keyword list names found
BY_FULL_PATH(new ArrayList<>(),
Bundle.FileSorter_SortingMethod_fullPath_displayName()), // Sort alphabetically by path
BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()),
BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()),
BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_downloads_displayName()),
BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName());
BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()),
BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()),
BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_downloads_displayName()),
BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName());
private final String displayName;
private final List<DiscoveryAttributes.AttributeType> requiredAttributes;

View File

@ -73,8 +73,8 @@ class SampleFileIngestModule implements FileIngestModule {
// Skip anything other than actual file system files.
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (file.isFile() == false)) {
return IngestModule.ProcessResult.OK;
}
@ -111,10 +111,13 @@ class SampleFileIngestModule implements FileIngestModule {
addToBlackboardPostCount(context.getJobId(), 1L);
/*
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
* Post the artifact to the blackboard. Doing so will cause events
* to be published that will trigger additional analysis, if
* applicable. For example, the creation of timeline events,
* indexing of the artifact for keyword search, and analysis by the
* data artifact ingest modules if the artifact is a data artifact.
*/
file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName());
file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName(), context.getJobId());
return IngestModule.ProcessResult.OK;

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015-2017 Basis Technology Corp.
* Copyright 2015-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -21,66 +21,76 @@ package org.sleuthkit.autopsy.guiutils;
import java.awt.Component;
import java.time.Duration;
import javax.swing.JTable;
import static javax.swing.SwingConstants.CENTER;
/**
 * A JTable cell renderer that renders a duration represented as a long as a
 * colon-separated string with days, hours, minutes, and seconds components. It
 * left-aligns cell content and grays out the cell if the table is disabled.
*/
public class DurationCellRenderer extends GrayableCellRenderer {
public final class DurationCellRenderer extends GrayableCellRenderer {
private static final long serialVersionUID = 1L;
private static final char UNIT_SEPARATOR_CHAR = ':';
public DurationCellRenderer() {
setHorizontalAlignment(CENTER);
setHorizontalAlignment(LEFT);
}
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
if (value instanceof Long) {
setText(DurationCellRenderer.longToDurationString((long) value));
}
grayCellIfTableNotEnabled(table, isSelected);
return this;
}
public static char getUnitSeperator() {
return UNIT_SEPARATOR_CHAR;
}
/**
* Convert a duration represented by a long to a human readable string with
 * days, hours, minutes, and seconds components.
*
     * @param duration - The representation of the duration in long form.
     *
     * @return - The representation of the duration in String form.
*/
public static String longToDurationString(long duration) {
Duration d = Duration.ofMillis(duration);
if (d.isNegative()) {
d = Duration.ofMillis(-duration);
d = Duration.ofMillis(0); //showing 0 briefly seems preferable to counting down to 0 and then back up from 0
}
String result;
long days = d.toDays();
long hours = d.minusDays(days).toHours();
long minutes = d.minusDays(days).minusHours(hours).toMinutes();
long seconds = d.minusDays(days).minusHours(hours).minusMinutes(minutes).getSeconds();
if (minutes > 0) {
if (hours > 0) {
if (days > 0) {
result = days + " d " + hours + " h " + minutes + " m " + seconds + " s";
} else {
result = hours + " h " + minutes + " m " + seconds + " s";
}
} else {
result = minutes + " m " + seconds + " s";
}
} else {
result = seconds + " s";
if (days < 0) {
days = 0;
}
return result;
if (hours < 0) {
hours = 0;
}
if (minutes < 0) {
minutes = 0;
}
if (seconds < 0) {
seconds = 0;
}
StringBuilder results = new StringBuilder(12);
if (days < 99) {
results.append(String.format("%02d", days));
} else {
results.append(days); //in the off chance something has been running for over 99 days, let's allow it to stand out by using as many characters as it needs
}
results.append(UNIT_SEPARATOR_CHAR);
results.append(String.format("%02d", hours));
results.append(UNIT_SEPARATOR_CHAR);
results.append(String.format("%02d", minutes));
results.append(UNIT_SEPARATOR_CHAR);
results.append(String.format("%02d", seconds));
return results.toString();
}
}
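As a quick sanity check of the new zero-padded, colon-separated format (a sketch, not part of the commit):

    // 3 hours, 25 minutes, 45 seconds expressed in milliseconds.
    long millis = ((3L * 60 + 25) * 60 + 45) * 1000;
    // Expected output with the new format: 00:03:25:45
    System.out.println(DurationCellRenderer.longToDurationString(millis));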

View File

@ -23,27 +23,28 @@ import java.util.Optional;
import org.sleuthkit.datamodel.DataArtifact;
/**
* A pipeline of data artifact ingest modules used to execute data artifact
* A pipeline of data artifact ingest modules used to perform data artifact
* ingest tasks for an ingest job.
*/
final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIngestTask> {
final class DataArtifactIngestPipeline extends IngestPipeline<DataArtifactIngestTask> {
/**
* Constructs a pipeline of data artifact ingest modules used to execute
* Constructs a pipeline of data artifact ingest modules used to perform
* data artifact ingest tasks for an ingest job.
*
* @param ingestJobPipeline The ingest job pipeline that owns this ingest
* task pipeline.
* @param moduleTemplates The ingest module templates that define this
* pipeline. May be an empty list.
* @param ingestJobExecutor The ingest job executor for this pipeline.
* @param moduleTemplates The ingest module templates to be used to
* construct the ingest modules for this pipeline.
* May be an empty list if this type of pipeline is
* not needed for the ingest job.
*/
DataArtifactIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobPipeline, moduleTemplates);
DataArtifactIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobExecutor, moduleTemplates);
}
@Override
Optional<PipelineModule<DataArtifactIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
Optional<IngestTaskPipeline.PipelineModule<DataArtifactIngestTask>> module = Optional.empty();
Optional<IngestPipeline.PipelineModule<DataArtifactIngestTask>> module = Optional.empty();
if (template.isDataArtifactIngestModuleTemplate()) {
DataArtifactIngestModule ingestModule = template.createDataArtifactIngestModule();
module = Optional.of(new DataArtifactIngestPipelineModule(ingestModule, template.getModuleName()));
@ -52,18 +53,18 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIn
}
@Override
void prepareForTask(DataArtifactIngestTask task) throws IngestTaskPipelineException {
void prepareForTask(DataArtifactIngestTask task) throws IngestPipelineException {
}
@Override
void cleanUpAfterTask(DataArtifactIngestTask task) throws IngestTaskPipelineException {
void cleanUpAfterTask(DataArtifactIngestTask task) throws IngestPipelineException {
}
/**
* A decorator that adds ingest infrastructure operations to a data artifact
* ingest module.
*/
static final class DataArtifactIngestPipelineModule extends IngestTaskPipeline.PipelineModule<DataArtifactIngestTask> {
static final class DataArtifactIngestPipelineModule extends IngestPipeline.PipelineModule<DataArtifactIngestTask> {
private final DataArtifactIngestModule module;
@ -80,7 +81,7 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIn
}
@Override
void executeTask(IngestJobPipeline ingestJobPipeline, DataArtifactIngestTask task) throws IngestModuleException {
void process(IngestJobExecutor ingestJobExecutor, DataArtifactIngestTask task) throws IngestModuleException {
DataArtifact artifact = task.getDataArtifact();
module.process(artifact);
}
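For context, a hedged sketch of a minimal module this pipeline could host; the class name is illustrative, and the interface shape is assumed from the process() call above:

    class ExampleDataArtifactIngestModule implements DataArtifactIngestModule {

        @Override
        public void startUp(IngestJobContext context) throws IngestModuleException {
            // Capture the context here if the module needs the job ID later.
        }

        @Override
        public ProcessResult process(DataArtifact artifact) {
            // Analyze the artifact; the pipeline calls this once per ingest task.
            return ProcessResult.OK;
        }
    }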

View File

@ -22,7 +22,7 @@ import org.sleuthkit.datamodel.DataArtifact;
/**
* A data artifact ingest task that will be executed by an ingest thread using a
* given ingest job pipeline.
* given ingest job executor.
*/
final class DataArtifactIngestTask extends IngestTask {
@ -30,14 +30,14 @@ final class DataArtifactIngestTask extends IngestTask {
/**
* Constructs a data artifact ingest task that will be executed by an ingest
* thread using a given ingest job pipeline.
* thread using a given ingest job executor.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobExecutor The ingest job executor to use to execute the
* task.
* @param artifact The data artifact to be processed.
*/
DataArtifactIngestTask(IngestJobPipeline ingestJobPipeline, DataArtifact artifact) {
super(ingestJobPipeline);
DataArtifactIngestTask(IngestJobExecutor ingestJobExecutor, DataArtifact artifact) {
super(ingestJobExecutor);
this.artifact = artifact;
}
@ -53,7 +53,7 @@ final class DataArtifactIngestTask extends IngestTask {
@Override
void execute(long threadId) {
super.setThreadId(threadId);
getIngestJobPipeline().execute(this);
getIngestJobExecutor().execute(this);
}
}

View File

@ -23,10 +23,10 @@ package org.sleuthkit.autopsy.ingest;
*/
public class DataSourceIngestModuleProgress {
private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
DataSourceIngestModuleProgress(IngestJobPipeline pipeline) {
this.ingestJobPipeline = pipeline;
DataSourceIngestModuleProgress(IngestJobExecutor ingestJobExecutor) {
this.ingestJobExecutor = ingestJobExecutor;
}
/**
@ -38,7 +38,7 @@ public class DataSourceIngestModuleProgress {
* data source.
*/
public void switchToDeterminate(int workUnits) {
this.ingestJobPipeline.switchDataSourceIngestProgressBarToDeterminate(workUnits);
ingestJobExecutor.switchDataSourceIngestProgressBarToDeterminate(workUnits);
}
/**
@ -46,7 +46,7 @@ public class DataSourceIngestModuleProgress {
* the total work units to process the data source is unknown.
*/
public void switchToIndeterminate() {
this.ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
ingestJobExecutor.switchDataSourceIngestProgressBarToIndeterminate();
}
/**
@ -56,7 +56,7 @@ public class DataSourceIngestModuleProgress {
* @param workUnits Number of work units performed so far by the module.
*/
public void progress(int workUnits) {
this.ingestJobPipeline.advanceDataSourceIngestProgressBar("", workUnits);
ingestJobExecutor.advanceDataSourceIngestProgressBar("", workUnits);
}
/**
@ -65,7 +65,7 @@ public class DataSourceIngestModuleProgress {
* @param message Message to display
*/
public void progress(String message) {
this.ingestJobPipeline.advanceDataSourceIngestProgressBar(message);
ingestJobExecutor.advanceDataSourceIngestProgressBar(message);
}
/**
@ -76,7 +76,7 @@ public class DataSourceIngestModuleProgress {
* @param workUnits Number of work units performed so far by the module.
*/
public void progress(String currentTask, int workUnits) {
this.ingestJobPipeline.advanceDataSourceIngestProgressBar(currentTask, workUnits);
ingestJobExecutor.advanceDataSourceIngestProgressBar(currentTask, workUnits);
}
}
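A hedged sketch of how a data source level ingest module might drive this progress object from its process() method; the item count and loop body are illustrative:

    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progress) {
        int totalItems = 100; // illustrative; a real module computes its work units
        progress.switchToDeterminate(totalItems);
        for (int i = 0; i < totalItems; i++) {
            // ... analyze one item of the data source ...
            progress.progress("Item " + (i + 1), i + 1);
        }
        return ProcessResult.OK;
    }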

View File

@ -29,7 +29,7 @@ import org.sleuthkit.datamodel.Content;
* A pipeline of data source level ingest modules for executing data source
* level ingest tasks for an ingest job.
*/
final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngestTask> {
final class DataSourceIngestPipeline extends IngestPipeline<DataSourceIngestTask> {
private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName());
private static final IngestManager ingestManager = IngestManager.getInstance();
@ -38,17 +38,19 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
* Constructs a pipeline of data source level ingest modules for performing
* data source level ingest tasks for an ingest job.
*
* @param ingestJobPipeline The ingest job pipeline that owns this pipeline.
* @param moduleTemplates The ingest module templates that define this
* pipeline.
* @param ingestJobExecutor The ingest job executor for this pipeline.
* @param moduleTemplates The ingest module templates to be used to
* construct the ingest modules for this pipeline.
* May be an empty list if this type of pipeline is
* not needed for the ingest job.
*/
DataSourceIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobPipeline, moduleTemplates);
DataSourceIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobExecutor, moduleTemplates);
}
@Override
Optional<IngestTaskPipeline.PipelineModule<DataSourceIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
Optional<IngestTaskPipeline.PipelineModule<DataSourceIngestTask>> module = Optional.empty();
Optional<IngestPipeline.PipelineModule<DataSourceIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
Optional<IngestPipeline.PipelineModule<DataSourceIngestTask>> module = Optional.empty();
if (template.isDataSourceIngestModuleTemplate()) {
DataSourceIngestModule ingestModule = template.createDataSourceIngestModule();
module = Optional.of(new DataSourcePipelineModule(ingestModule, template.getModuleName()));
@ -69,7 +71,7 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
* A wrapper that adds ingest infrastructure operations to a data source
* level ingest module.
*/
static final class DataSourcePipelineModule extends IngestTaskPipeline.PipelineModule<DataSourceIngestTask> {
static final class DataSourcePipelineModule extends IngestPipeline.PipelineModule<DataSourceIngestTask> {
private final DataSourceIngestModule module;
@ -83,18 +85,18 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
}
@Override
void executeTask(IngestJobPipeline ingestJobPipeline, DataSourceIngestTask task) throws IngestModuleException {
void process(IngestJobExecutor ingestJobExecutor, DataSourceIngestTask task) throws IngestModuleException {
Content dataSource = task.getDataSource();
String progressBarDisplayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.displayName", getDisplayName(), dataSource.getName());
ingestJobPipeline.updateDataSourceIngestProgressBarDisplayName(progressBarDisplayName);
ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
ingestJobExecutor.updateDataSourceIngestProgressBarDisplayName(progressBarDisplayName);
ingestJobExecutor.switchDataSourceIngestProgressBarToIndeterminate();
ingestManager.setIngestTaskProgress(task, getDisplayName());
logger.log(Level.INFO, "{0} analysis of {1} starting", new Object[]{getDisplayName(), dataSource.getName()}); //NON-NLS
module.process(dataSource, new DataSourceIngestModuleProgress(ingestJobPipeline));
module.process(dataSource, new DataSourceIngestModuleProgress(ingestJobExecutor));
logger.log(Level.INFO, "{0} analysis of {1} finished", new Object[]{getDisplayName(), dataSource.getName()}); //NON-NLS
if (!ingestJobPipeline.isCancelled() && ingestJobPipeline.currentDataSourceIngestModuleIsCancelled()) {
ingestJobPipeline.currentDataSourceIngestModuleCancellationCompleted(getDisplayName());
}
if (!ingestJobExecutor.isCancelled() && ingestJobExecutor.currentDataSourceIngestModuleIsCancelled()) {
ingestJobExecutor.currentDataSourceIngestModuleCancellationCompleted(getDisplayName());
}
}
}

View File

@ -20,25 +20,25 @@ package org.sleuthkit.autopsy.ingest;
/**
* A data source level ingest task that will be executed by an ingest thread
* using a given ingest job pipeline.
* using a given ingest job executor.
*/
final class DataSourceIngestTask extends IngestTask {
/**
* Constructs a data source level ingest task that will be executed by an
* ingest thread using a given ingest job pipeline.
* ingest thread using a given ingest job executor.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobExecutor The ingest job executor to use to execute the
* task.
*/
DataSourceIngestTask(IngestJobPipeline ingestJobPipeline) {
super(ingestJobPipeline);
DataSourceIngestTask(IngestJobExecutor ingestJobExecutor) {
super(ingestJobExecutor);
}
@Override
void execute(long threadId) {
super.setThreadId(threadId);
getIngestJobPipeline().execute(this);
}
getIngestJobExecutor().execute(this);
}
}

View File

@ -39,32 +39,34 @@ import org.sleuthkit.datamodel.TskCoreException;
@NbBundle.Messages({
"FileIngestPipeline_SaveResults_Activity=Saving Results"
})
final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
final class FileIngestPipeline extends IngestPipeline<FileIngestTask> {
private static final int FILE_BATCH_SIZE = 500;
private static final String SAVE_RESULTS_ACTIVITY = Bundle.FileIngestPipeline_SaveResults_Activity();
private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName());
private static final IngestManager ingestManager = IngestManager.getInstance();
private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
private final List<AbstractFile> fileBatch;
/**
* Constructs a pipeline of file ingest modules for executing file ingest
* tasks for an ingest job.
*
* @param ingestJobPipeline The ingest job pipeline that owns this pipeline.
* @param moduleTemplates The ingest module templates that define this
* pipeline.
* @param ingestJobExecutor The ingest job executor for this pipeline.
* @param moduleTemplates The ingest module templates to be used to
* construct the ingest modules for this pipeline.
* May be an empty list if this type of pipeline is
* not needed for the ingest job.
*/
FileIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobPipeline, moduleTemplates);
this.ingestJobPipeline = ingestJobPipeline;
FileIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobExecutor, moduleTemplates);
this.ingestJobExecutor = ingestJobExecutor;
fileBatch = new ArrayList<>();
}
@Override
Optional<IngestTaskPipeline.PipelineModule<FileIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
Optional<IngestTaskPipeline.PipelineModule<FileIngestTask>> module = Optional.empty();
Optional<IngestPipeline.PipelineModule<FileIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
Optional<IngestPipeline.PipelineModule<FileIngestTask>> module = Optional.empty();
if (template.isFileIngestModuleTemplate()) {
FileIngestModule ingestModule = template.createFileIngestModule();
module = Optional.of(new FileIngestPipelineModule(ingestModule, template.getModuleName()));
@ -73,18 +75,18 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
}
@Override
void prepareForTask(FileIngestTask task) throws IngestTaskPipelineException {
void prepareForTask(FileIngestTask task) throws IngestPipelineException {
}
@Override
void cleanUpAfterTask(FileIngestTask task) throws IngestTaskPipelineException {
void cleanUpAfterTask(FileIngestTask task) throws IngestPipelineException {
try {
ingestManager.setIngestTaskProgress(task, SAVE_RESULTS_ACTIVITY);
AbstractFile file = task.getFile();
file.close();
cacheFileForBatchUpdate(file);
} catch (TskCoreException ex) {
throw new IngestTaskPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS
throw new IngestPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS
} finally {
ingestManager.setIngestTaskProgressCompleted(task);
}
@ -96,7 +98,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
Date start = new Date();
try {
updateBatchedFiles();
} catch (IngestTaskPipelineException ex) {
} catch (IngestPipelineException ex) {
errors.add(new IngestModuleError(SAVE_RESULTS_ACTIVITY, ex));
}
Date finish = new Date();
@ -113,9 +115,9 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
*
* @param file The file.
*
* @throws IngestTaskPipelineException if the case database update fails.
* @throws IngestPipelineException if the case database update fails.
*/
private void cacheFileForBatchUpdate(AbstractFile file) throws IngestTaskPipelineException {
private void cacheFileForBatchUpdate(AbstractFile file) throws IngestPipelineException {
/*
* Only one file ingest thread at a time will try to access the file
* cache. The synchronization here is to ensure visibility of the files
@ -134,9 +136,9 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
* Updates the case database with new properties added to the files in the
* cache by the ingest modules that processed them.
*
* @throws IngestTaskPipelineException if the case database update fails.
* @throws IngestPipelineException if the case database update fails.
*/
private void updateBatchedFiles() throws IngestTaskPipelineException {
private void updateBatchedFiles() throws IngestPipelineException {
/*
* Only one file ingest thread at a time will try to access the file
* cache. The synchronization here is to ensure visibility of the files
@ -146,7 +148,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
synchronized (fileBatch) {
CaseDbTransaction transaction = null;
try {
if (!ingestJobPipeline.isCancelled()) {
if (!ingestJobExecutor.isCancelled()) {
Case currentCase = Case.getCurrentCaseThrows();
SleuthkitCase caseDb = currentCase.getSleuthkitCase();
transaction = caseDb.beginTransaction();
@ -166,7 +168,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
logger.log(Level.SEVERE, "Error rolling back transaction after failure to save updated properties for cached files from tasks", ex1);
}
}
throw new IngestTaskPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS
throw new IngestPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS
} finally {
fileBatch.clear();
}
@ -177,7 +179,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
* A wrapper that adds ingest infrastructure operations to a file ingest
* module.
*/
static final class FileIngestPipelineModule extends IngestTaskPipeline.PipelineModule<FileIngestTask> {
static final class FileIngestPipelineModule extends IngestPipeline.PipelineModule<FileIngestTask> {
private final FileIngestModule module;
@ -195,7 +197,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
}
@Override
void executeTask(IngestJobPipeline ingestJobPipeline, FileIngestTask task) throws IngestModuleException {
void process(IngestJobExecutor ingestJobExecutor, FileIngestTask task) throws IngestModuleException {
AbstractFile file = null;
try {
file = task.getFile();

View File

@ -25,7 +25,7 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* A file ingest task that will be executed by an ingest thread using a given
* ingest job pipeline.
* ingest job executor.
*/
final class FileIngestTask extends IngestTask {
@ -34,13 +34,13 @@ final class FileIngestTask extends IngestTask {
/**
* Constructs a file ingest task that will be executed by an ingest thread
* using a given ingest job pipeline.
* using a given ingest job executor.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobPipeline The ingest job executor to use to execute the
* task.
* @param file The file to be processed.
*/
FileIngestTask(IngestJobPipeline ingestJobPipeline, AbstractFile file) {
FileIngestTask(IngestJobExecutor ingestJobPipeline, AbstractFile file) {
super(ingestJobPipeline);
this.file = file;
fileId = file.getId();
@ -48,15 +48,15 @@ final class FileIngestTask extends IngestTask {
/**
* Constructs a file ingest task that will be executed by an ingest thread
* using a given ingest job pipeline. This constructor supports streaming
* using a given ingest job executor. This constructor supports streaming
* ingest by deferring the construction of the AbstractFile object for this
* task to conserve heap memory.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobPipeline The ingest job executor to use to execute the
* task.
* @param fileId The object ID of the file to be processed.
*/
FileIngestTask(IngestJobPipeline ingestJobPipeline, long fileId) {
FileIngestTask(IngestJobExecutor ingestJobPipeline, long fileId) {
super(ingestJobPipeline);
this.fileId = fileId;
}
@ -88,7 +88,7 @@ final class FileIngestTask extends IngestTask {
@Override
void execute(long threadId) {
super.setThreadId(threadId);
getIngestJobPipeline().execute(this);
getIngestJobExecutor().execute(this);
}
@Override
@ -100,19 +100,19 @@ final class FileIngestTask extends IngestTask {
return false;
}
FileIngestTask other = (FileIngestTask) obj;
IngestJobPipeline thisPipeline = getIngestJobPipeline();
IngestJobPipeline otherPipeline = other.getIngestJobPipeline();
IngestJobExecutor thisPipeline = getIngestJobExecutor();
IngestJobExecutor otherPipeline = other.getIngestJobExecutor();
if (thisPipeline != otherPipeline && (thisPipeline == null || !thisPipeline.equals(otherPipeline))) {
return false;
}
return (this.fileId == other.fileId);
return (getFileId() == other.getFileId());
}
@Override
public int hashCode() {
int hash = 5;
hash = 47 * hash + Objects.hashCode(getIngestJobPipeline());
hash = 47 * hash + Objects.hashCode(this.fileId);
hash = 47 * hash + Objects.hashCode(getIngestJobExecutor());
hash = 47 * hash + Objects.hashCode(getFileId());
return hash;
}

View File

@ -28,10 +28,11 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
/**
* Analyzes one or more data sources using a set of ingest modules specified via
* ingest job settings.
 * Analyzes a data source using a set of ingest modules specified via ingest
* job settings.
*/
public final class IngestJob {
@ -73,17 +74,17 @@ public final class IngestJob {
private final List<AbstractFile> files = new ArrayList<>();
private final Mode ingestMode;
private final IngestJobSettings settings;
private volatile IngestJobPipeline ingestJobPipeline;
private volatile IngestJobExecutor ingestModuleExecutor;
private volatile CancellationReason cancellationReason;
/**
* Constructs a batch mode ingest job that analyzes a data source using a
* set of ingest modules specified via ingest job settings. Either all of
* the files in the data source or a given subset of the files will be
* analyzed.
* set of ingest modules specified via ingest job settings.
*
* @param dataSource The data source to be analyzed.
* @param files A subset of the files from the data source.
* @param files A subset of the files from the data source to be
 *                   analyzed; may be empty if all of the files should be
* analyzed.
* @param settings The ingest job settings.
*/
IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
@ -91,13 +92,6 @@ public final class IngestJob {
this.files.addAll(files);
}
/**
* Constructs an ingest job that analyzes a data source using a set of
* ingest modules specified via ingest job settings, possibly using an
* ingest stream.
*
* @param settings The ingest job settings.
*/
/**
* Constructs an ingest job that analyzes a data source using a set of
* ingest modules specified via ingest job settings, possibly using an
@ -108,7 +102,7 @@ public final class IngestJob {
* @param settings The ingest job settings.
*/
IngestJob(Content dataSource, Mode ingestMode, IngestJobSettings settings) {
this.id = IngestJob.nextId.getAndIncrement();
id = IngestJob.nextId.getAndIncrement();
this.dataSource = dataSource;
this.settings = settings;
this.ingestMode = ingestMode;
@ -125,6 +119,15 @@ public final class IngestJob {
return this.id;
}
/**
* Gets the data source to be analyzed by this job.
*
* @return The data source.
*/
Content getDataSource() {
return dataSource;
}
/**
* Checks to see if this ingest job has at least one non-empty ingest module
* pipeline.
@ -136,31 +139,41 @@ public final class IngestJob {
}
/**
* Adds a set of files to this ingest job if it is running in streaming
* Adds a set of files to this ingest job, if it is running in streaming
* ingest mode.
*
* @param fileObjIds The object IDs of the files.
*/
void addStreamingIngestFiles(List<Long> fileObjIds) {
void addStreamedFiles(List<Long> fileObjIds) {
if (ingestMode == Mode.STREAMING) {
if (ingestJobPipeline != null) {
ingestJobPipeline.addStreamedFiles(fileObjIds);
if (ingestModuleExecutor != null) {
ingestModuleExecutor.addStreamedFiles(fileObjIds);
} else {
logger.log(Level.SEVERE, "Attempted to add streamed ingest files with no ingest pipeline");
logger.log(Level.SEVERE, "Attempted to add streamed files with no ingest pipeline");
}
} else {
logger.log(Level.SEVERE, "Attempted to add streamed ingest files to batch ingest job");
logger.log(Level.SEVERE, "Attempted to add streamed files to batch ingest job");
}
}
/**
* Adds one or more data artifacts to this ingest job for processing by its
* data artifact ingest modules.
*
* @param dataArtifacts The data artifacts.
*/
void addDataArtifacts(List<DataArtifact> dataArtifacts) {
ingestModuleExecutor.addDataArtifacts(dataArtifacts);
}
/**
* Starts data source level analysis for this job if it is running in
* streaming ingest mode.
*/
void processStreamingIngestDataSource() {
if (ingestMode == Mode.STREAMING) {
if (ingestJobPipeline != null) {
ingestJobPipeline.addStreamedDataSource();
if (ingestModuleExecutor != null) {
ingestModuleExecutor.startStreamingModeDataSourceAnalysis();
} else {
logger.log(Level.SEVERE, "Attempted to start data source analaysis with no ingest pipeline");
}
@ -176,16 +189,16 @@ public final class IngestJob {
* @return A collection of ingest module start up errors, empty on success.
*/
synchronized List<IngestModuleError> start() throws InterruptedException {
if (ingestJobPipeline != null) {
if (ingestModuleExecutor != null) {
logger.log(Level.SEVERE, "Attempt to start ingest job that has already been started");
return Collections.emptyList();
}
ingestJobPipeline = new IngestJobPipeline(this, dataSource, files, settings);
ingestModuleExecutor = new IngestJobExecutor(this, dataSource, files, settings);
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(ingestJobPipeline.startUp());
errors.addAll(ingestModuleExecutor.startUp());
if (errors.isEmpty()) {
IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestJobPipeline.getDataSource());
IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestModuleExecutor.getDataSource());
} else {
cancel(CancellationReason.INGEST_MODULES_STARTUP_FAILED);
}
@ -220,7 +233,7 @@ public final class IngestJob {
*/
public ProgressSnapshot getSnapshot(boolean includeIngestTasksSnapshot) {
ProgressSnapshot snapshot = null;
if (ingestJobPipeline != null) {
if (ingestModuleExecutor != null) {
return new ProgressSnapshot(includeIngestTasksSnapshot);
}
return snapshot;
@ -233,8 +246,8 @@ public final class IngestJob {
*/
Snapshot getDiagnosticStatsSnapshot() {
Snapshot snapshot = null;
if (ingestJobPipeline != null) {
snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(true);
if (ingestModuleExecutor != null) {
snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(true);
}
return snapshot;
}
@ -272,8 +285,8 @@ public final class IngestJob {
* ingest manager's ingest jobs list lock.
*/
new Thread(() -> {
if (ingestJobPipeline != null) {
ingestJobPipeline.cancel(reason);
if (ingestModuleExecutor != null) {
ingestModuleExecutor.cancel(reason);
}
}).start();
}
@ -284,7 +297,7 @@ public final class IngestJob {
* @return The cancellation reason, may be not cancelled.
*/
public CancellationReason getCancellationReason() {
return this.cancellationReason;
return cancellationReason;
}
/**
@ -294,18 +307,16 @@ public final class IngestJob {
* @return True or false.
*/
public boolean isCancelled() {
return (CancellationReason.NOT_CANCELLED != this.cancellationReason);
return (CancellationReason.NOT_CANCELLED != cancellationReason);
}
/**
* Provides a callback for the ingest modules pipeline, allowing this ingest
* Provides a callback for the ingest module executor, allowing this ingest
* job to notify the ingest manager when it is complete.
*
* @param ingestJobPipeline A completed ingestJobPipeline.
*/
void notifyIngestPipelineShutDown() {
void notifyIngestPipelinesShutDown() {
IngestManager ingestManager = IngestManager.getInstance();
if (!ingestJobPipeline.isCancelled()) {
if (!ingestModuleExecutor.isCancelled()) {
ingestManager.fireDataSourceAnalysisCompleted(id, dataSource);
} else {
IngestManager.getInstance().fireDataSourceAnalysisCancelled(id, dataSource);
@ -423,11 +434,7 @@ public final class IngestJob {
* stats part of the snapshot.
*/
private ProgressSnapshot(boolean includeIngestTasksSnapshot) {
/*
* Note that the getSnapshot() will not construct a ProgressSnapshot
* if ingestJobPipeline is null.
*/
Snapshot snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot);
Snapshot snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot);
dataSourceProcessingSnapshot = new DataSourceProcessingSnapshot(snapshot);
jobCancellationRequested = IngestJob.this.isCancelled();
jobCancellationReason = IngestJob.this.getCancellationReason();
@ -444,7 +451,7 @@ public final class IngestJob {
DataSourceIngestModuleHandle moduleHandle = null;
DataSourceIngestPipeline.DataSourcePipelineModule module = dataSourceProcessingSnapshot.getDataSourceLevelIngestModule();
if (module != null) {
moduleHandle = new DataSourceIngestModuleHandle(ingestJobPipeline, module);
moduleHandle = new DataSourceIngestModuleHandle(ingestModuleExecutor, module);
}
return moduleHandle;
}
@ -507,7 +514,7 @@ public final class IngestJob {
*/
public static class DataSourceIngestModuleHandle {
private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
private final DataSourceIngestPipeline.DataSourcePipelineModule module;
private final boolean cancelled;
@ -516,14 +523,14 @@ public final class IngestJob {
* used to get basic information about the module and to request
* cancellation of the module.
*
* @param ingestJobPipeline The ingestJobPipeline that owns the data
* @param ingestJobExecutor The ingest job executor that owns the data
* source level ingest module.
* @param module The data source level ingest module.
*/
private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
this.ingestJobPipeline = ingestJobPipeline;
private DataSourceIngestModuleHandle(IngestJobExecutor ingestJobExecutor, DataSourceIngestPipeline.DataSourcePipelineModule module) {
this.ingestJobExecutor = ingestJobExecutor;
this.module = module;
this.cancelled = ingestJobPipeline.currentDataSourceIngestModuleIsCancelled();
this.cancelled = ingestJobExecutor.currentDataSourceIngestModuleIsCancelled();
}
/**
@ -533,7 +540,7 @@ public final class IngestJob {
* @return The display name.
*/
public String displayName() {
return this.module.getDisplayName();
return module.getDisplayName();
}
/**
@ -543,7 +550,7 @@ public final class IngestJob {
* @return The module processing start time.
*/
public Date startTime() {
return this.module.getProcessingStartTime();
return module.getProcessingStartTime();
}
/**
@ -553,7 +560,7 @@ public final class IngestJob {
* @return True or false.
*/
public boolean isCancelled() {
return this.cancelled;
return cancelled;
}
/**
@ -567,8 +574,8 @@ public final class IngestJob {
* could perhaps be solved by adding a cancel() API to the
* IngestModule interface.
*/
if (this.ingestJobPipeline.getCurrentDataSourceIngestModule() == this.module) {
this.ingestJobPipeline.cancelCurrentDataSourceIngestModule();
if (ingestJobExecutor.getCurrentDataSourceIngestModule() == module) {
ingestJobExecutor.cancelCurrentDataSourceIngestModule();
}
}
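A hedged usage sketch for the handle; it assumes a ProgressSnapshot obtained from IngestJob.getSnapshot() and that runningDataSourceIngestModule() is the snapshot accessor that constructs the handle shown above:

    IngestJob.ProgressSnapshot snapshot = job.getSnapshot(false);
    IngestJob.DataSourceIngestModuleHandle handle = snapshot.runningDataSourceIngestModule();
    if (handle != null && !handle.isCancelled()) {
        handle.cancel(); // requests cancellation of the current data source level module
    }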

View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.ingest;
import java.util.List;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
/**
* Provides an ingest module with services specific to the ingest job of which
@ -29,16 +28,16 @@ import org.sleuthkit.datamodel.DataArtifact;
*/
public final class IngestJobContext {
private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
/**
* Constructs an ingest job context object that provides an ingest module
* with services specific to the ingest job of which the module is a part.
*
* @param ingestJobPipeline The ingest pipeline for the job.
* @param ingestJobExecutor The ingest executor for the job.
*/
IngestJobContext(IngestJobPipeline ingestJobPipeline) {
this.ingestJobPipeline = ingestJobPipeline;
IngestJobContext(IngestJobExecutor ingestJobExecutor) {
this.ingestJobExecutor = ingestJobExecutor;
}
/**
@ -47,7 +46,7 @@ public final class IngestJobContext {
* @return The context string.
*/
public String getExecutionContext() {
return ingestJobPipeline.getExecutionContext();
return ingestJobExecutor.getExecutionContext();
}
/**
@ -56,7 +55,7 @@ public final class IngestJobContext {
* @return The data source.
*/
public Content getDataSource() {
return ingestJobPipeline.getDataSource();
return ingestJobExecutor.getDataSource();
}
/**
@ -65,7 +64,7 @@ public final class IngestJobContext {
* @return The ID.
*/
public long getJobId() {
return ingestJobPipeline.getIngestJobId();
return ingestJobExecutor.getIngestJobId();
}
/**
@ -79,7 +78,7 @@ public final class IngestJobContext {
*/
@Deprecated
public boolean isJobCancelled() {
return ingestJobPipeline.isCancelled();
return ingestJobExecutor.isCancelled();
}
/**
@ -91,7 +90,7 @@ public final class IngestJobContext {
* @return True or false.
*/
public boolean dataSourceIngestIsCancelled() {
return ingestJobPipeline.currentDataSourceIngestModuleIsCancelled() || ingestJobPipeline.isCancelled();
return ingestJobExecutor.currentDataSourceIngestModuleIsCancelled() || ingestJobExecutor.isCancelled();
}
/**
@ -106,7 +105,7 @@ public final class IngestJobContext {
* It is not currently possible to cancel individual file ingest
* modules.
*/
return ingestJobPipeline.isCancelled();
return ingestJobExecutor.isCancelled();
}
/**
@ -122,7 +121,7 @@ public final class IngestJobContext {
* It is not currently possible to cancel individual data artifact
* ingest modules.
*/
return ingestJobPipeline.isCancelled();
return ingestJobExecutor.isCancelled();
}
/**
@ -132,7 +131,7 @@ public final class IngestJobContext {
* @return True or false.
*/
public boolean processingUnallocatedSpace() {
return ingestJobPipeline.shouldProcessUnallocatedSpace();
return ingestJobExecutor.shouldProcessUnallocatedSpace();
}
/**
@ -146,8 +145,8 @@ public final class IngestJobContext {
@Deprecated
public void scheduleFiles(List<AbstractFile> files) {
addFilesToJob(files);
}
}
/**
* Adds one or more files, e.g., extracted or carved files, to the ingest
* job for processing by its file ingest modules.
@ -155,17 +154,7 @@ public final class IngestJobContext {
* @param files The files.
*/
public void addFilesToJob(List<AbstractFile> files) {
ingestJobPipeline.addFiles(files);
}
/**
* Adds one or more data artifacts to the ingest job for processing by its
* data artifact ingest modules.
*
* @param artifacts The artifacts.
*/
public void addDataArtifactsToJob(List<DataArtifact> artifacts) {
ingestJobPipeline.addDataArtifacts(artifacts);
ingestJobExecutor.addFiles(files);
}
}
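A short sketch of the non-deprecated path for feeding derived files back to the job; context, file, and the extractFiles() helper are illustrative names inside a hypothetical file ingest module:

    // Extracted or carved files are queued for the job's file ingest modules.
    List<AbstractFile> derivedFiles = extractFiles(file); // hypothetical helper
    if (!derivedFiles.isEmpty()) {
        context.addFilesToJob(derivedFiles);
    }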

View File

@ -56,7 +56,7 @@ class IngestJobInputStream implements IngestStream {
if (closed) {
throw new IngestStreamClosedException("Can not add files - ingest stream is closed");
}
ingestJob.addStreamingIngestFiles(fileObjectIds);
ingestJob.addStreamedFiles(fileObjectIds);
}
@Override

View File

@ -34,6 +34,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
@ -72,6 +73,7 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.TskCoreException;
@ -288,13 +290,103 @@ public class IngestManager implements IngestProgressSnapshotProvider {
/**
* Handles artifacts posted events published by the Sleuth Kit layer
* blackboard via the event bus for the case database.
* blackboard via the Sleuth Kit event bus.
*
* @param tskEvent A Sleuth Kit data model ArtifactsPostedEvent from the
* case database event bus.
* @param tskEvent The event.
*/
@Subscribe
void handleArtifactsPosted(Blackboard.ArtifactsPostedEvent tskEvent) {
/*
* Add any new data artifacts included in the event to the source ingest
* job for possible analysis.
*/
List<DataArtifact> newDataArtifacts = new ArrayList<>();
Collection<BlackboardArtifact> newArtifacts = tskEvent.getArtifacts();
for (BlackboardArtifact artifact : newArtifacts) {
if (artifact instanceof DataArtifact) {
newDataArtifacts.add((DataArtifact) artifact);
}
}
if (!newDataArtifacts.isEmpty()) {
IngestJob ingestJob = null;
Optional<Long> ingestJobId = tskEvent.getIngestJobId();
if (ingestJobId.isPresent()) {
synchronized (ingestJobsById) {
ingestJob = ingestJobsById.get(ingestJobId.get());
}
} else {
/*
* There are four use cases where the ingest job ID returned by
                 * the event is expected to be null:
                 *
                 * 1. The artifacts are being posted by a data source processor
                 * (DSP) module that runs before the ingest job is created,
                 * i.e., a DSP that does not support streaming ingest and has no
                 * notion of an ingest job ID. In this use case, the event is
* handled synchronously. The DSP calls
* Blackboard.postArtifacts(), which puts the event on the event
* bus to which this method subscribes, so the event will be
* handled here before the DSP completes and calls
* DataSourceProcessorCallback.done(). This means the code below
* will execute before the ingest job is created, so it will not
* find an ingest job to which to add the artifacts. However,
* the artifacts WILL be analyzed after the ingest job is
* started, when the ingest job executor, working in batch mode,
* schedules ingest tasks for all of the data artifacts in the
* case database. There is a slight risk that the wrong ingest
* job will be selected if multiple ingests of the same data
* source are in progress.
*
* 2. The artifacts were posted by an ingest module that either
* has not been updated to use the current
* Blackboard.postArtifacts() API, or is using it incorrectly.
* In this use case, the code below should be able to find the
* ingest job to which to add the artifacts via their data
* source. There is a slight risk that the wrong ingest job will
* be selected if multiple ingests of the same data source are
* in progress.
*
* 3. The portable case generator uses a
* CommunicationArtifactsHelper constructed with a null ingest
                 * job ID, and the CommunicationArtifactsHelper posts artifacts.
* Ingest of that data source might be running, in which case
* the data artifact will be analyzed. It also might be analyzed
* by a subsequent ingest job for the data source. This is an
* acceptable edge case.
*
* 4. The user can manually create timeline events with the
* timeline tool, which posts the TSK_TL_EVENT data artifacts.
* The user selects the data source for these artifacts. Ingest
* of that data source might be running, in which case the data
* artifact will be analyzed. It also might be analyzed by a
* subsequent ingest job for the data source. This is an
* acceptable edge case.
*/
DataArtifact dataArtifact = newDataArtifacts.get(0);
try {
Content artifactDataSource = dataArtifact.getDataSource();
synchronized (ingestJobsById) {
for (IngestJob job : ingestJobsById.values()) {
Content dataSource = job.getDataSource();
if (artifactDataSource.getId() == dataSource.getId()) {
ingestJob = job;
break;
}
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to get data source for data artifact (object ID = %d)", dataArtifact.getId()), ex); //NON-NLS
}
}
if (ingestJob != null) {
ingestJob.addDataArtifacts(newDataArtifacts);
}
}
/*
* Publish Autopsy events for the new artifacts, one event per artifact
* type.
*/
for (BlackboardArtifact.Type artifactType : tskEvent.getArtifactTypes()) {
ModuleDataEvent legacyEvent = new ModuleDataEvent(tskEvent.getModuleName(), artifactType, tskEvent.getArtifacts(artifactType));
AutopsyEvent autopsyEvent = new BlackboardPostEvent(legacyEvent);
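The fallback routing above is only needed when the event carries no job ID; modules that pass their ID when posting let handleArtifactsPosted() read it from tskEvent.getIngestJobId() instead of guessing by data source. A sketch of the preferred call, assuming a module holding its IngestJobContext:

    // The posted-artifacts event produced by this call carries the job ID,
    // so the new artifacts are routed directly to the owning ingest job.
    blackboard.postArtifact(artifact, moduleName, context.getJobId());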
@ -825,7 +917,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
*/
void setIngestTaskProgress(DataSourceIngestTask task, String currentModuleName) {
IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId());
IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource());
IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource());
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap);
/*
@ -847,10 +939,10 @@ public class IngestManager implements IngestProgressSnapshotProvider {
IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId());
IngestThreadActivitySnapshot newSnap;
try {
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile());
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting file from file ingest task", ex);
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource());
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource());
}
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap);

View File

@ -33,21 +33,24 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
/**
* An abstract superclass for pipelines of ingest modules that execute ingest
* tasks for an ingest job. Subclasses need to extend this class and to
* implement a specialization of the inner PipelineModule abstract superclass.
* An abstract superclass for pipelines of ingest modules that perform the
* ingest tasks that make up an ingest job. A pipeline performs a task by
* passing it sequentially to the process() method of each module in the
* pipeline.
*
* NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use
* by one thread at a time. There are a few status fields that are volatile to
* ensure visibility to threads making ingest progress snapshots, but methods
* such as startUp(), executeTask() and shutDown() are not synchronized.
*
* @param <T> The ingest task type.
* @param <T> The type of ingest tasks the pipeline performs.
*/
abstract class IngestTaskPipeline<T extends IngestTask> {
abstract class IngestPipeline<T extends IngestTask> {
private static final Logger logger = Logger.getLogger(IngestTaskPipeline.class.getName());
private final IngestJobPipeline ingestJobPipeline;
/*
* NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use
* by one thread at a time. There are a few status fields that are volatile
* to ensure visibility to threads making ingest progress snapshots, but
* methods such as startUp(), performTask() and shutDown() are not
* synchronized.
*/
private static final Logger logger = Logger.getLogger(IngestPipeline.class.getName());
private final IngestJobExecutor ingestJobExecutor;
private final List<IngestModuleTemplate> moduleTemplates;
private final List<PipelineModule<T>> modules;
private volatile Date startTime;
@ -56,38 +59,34 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
/**
* Constructs the superclass part of a pipeline of ingest modules that
* executes ingest tasks for an ingest job.
* performs ingest tasks for an ingest job.
*
* @param ingestPipeline The parent ingest job pipeline for this ingest
* task pipeline.
* @param moduleTemplates The ingest module templates that define this
* ingest task pipeline. May be an empty list.
* @param ingestJobExecutor The ingest job executor for this pipeline.
* @param moduleTemplates The ingest module templates to be used to
* construct the ingest modules for this pipeline.
* May be an empty list if this type of pipeline is
* not needed for the ingest job.
*/
IngestTaskPipeline(IngestJobPipeline ingestPipeline, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJobPipeline = ingestPipeline;
/*
* The creation of ingest modules from the ingest module templates has
* been deliberately deferred to the startUp() method so that any and
* all errors in module construction or start up can be reported to the
* client code.
*/
IngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJobExecutor = ingestJobExecutor;
this.moduleTemplates = moduleTemplates;
modules = new ArrayList<>();
}
/**
* Indicates whether or not there are any ingest modules in this ingest task
* Indicates whether or not there are any ingest modules in this ingest
* pipeline.
*
* @return True or false.
* @return True or false; always true before startUp() is called.
*/
boolean isEmpty() {
return modules.isEmpty();
}
/**
* Queries whether or not this ingest task pipeline is running, i.e., the
* startUp() method has been called and the shutDown() has not been called.
* Queries whether or not this ingest pipeline is running, i.e., the
* startUp() method has been called and the shutDown() method has not been
* called yet.
*
* @return True or false.
*/
@ -96,8 +95,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}
/**
* Starts up this ingest task pipeline by calling the startUp() methods of
* the ingest modules in the pipeline.
* Starts up this ingest pipeline by calling the startUp() methods of the
* ingest modules in the pipeline.
*
* @return A list of ingest module start up errors, possibly empty.
*/
@ -110,21 +109,19 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
* any and all errors in module construction or start up can be
* reported to the client code.
*/
createIngestModules(moduleTemplates);
createIngestModules();
errors.addAll(startUpIngestModules());
} else {
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline already started"))); //NON-NLS
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline already started"))); //NON-NLS
}
return errors;
}
/**
* Creates the ingest modules for this ingest task pipeline from the given
* ingest module templates.
*
* @param moduleTemplates The ingest module templates.
* Creates the ingest modules for this ingest pipeline using its ingest
* module templates.
*/
private void createIngestModules(List<IngestModuleTemplate> moduleTemplates) {
private void createIngestModules() {
if (modules.isEmpty()) {
for (IngestModuleTemplate template : moduleTemplates) {
Optional<PipelineModule<T>> module = acceptModuleTemplate(template);
@ -137,8 +134,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
/**
* Determines if one of the types of ingest modules that can be created from
* a given ingest module template should be added to this ingest task
* pipeline. If so, the ingest module is created and returned.
* a given ingest module template should be added to this ingest pipeline.
* If so, the ingest module is created and returned.
*
* @param template The ingest module template to be used or ignored, as
* appropriate to the pipeline type.
@ -149,7 +146,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
abstract Optional<PipelineModule<T>> acceptModuleTemplate(IngestModuleTemplate template);
/**
* Starts up the ingest modules in this ingest task pipeline.
* Starts up the ingest modules in this ingest pipeline.
*
* @return A list of ingest module start up errors, possibly empty.
*/
@ -159,7 +156,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
running = true;
for (PipelineModule<T> module : modules) {
try {
module.startUp(new IngestJobContext(ingestJobPipeline));
module.startUp(new IngestJobContext(ingestJobExecutor));
} catch (Throwable ex) {
/*
* A catch-all exception firewall. Start up errors for all of
@ -174,10 +171,10 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}
/**
* Returns the start up time of this ingest task pipeline.
* Returns the start up time of this ingest pipeline.
*
* @return The file processing start time, may be null if this pipeline has
* not been started yet.
* @return The start up time, may be null if this pipeline has not been
* started yet.
*/
Date getStartTime() {
Date reportedStartTime = null;
@ -188,65 +185,66 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}
/**
* Executes an ingest task by calling the process() methods of the ingest
* modules in this ingest task pipeline.
* Performs an ingest task by sequentially calling the process() methods of
* the ingest modules in this ingest pipeline.
*
* @param task The task.
*
* @return A list of ingest module task processing errors, possibly empty.
* @return A list of ingest module processing errors, possibly empty.
*/
List<IngestModuleError> executeTask(T task) {
List<IngestModuleError> performTask(T task) {
List<IngestModuleError> errors = new ArrayList<>();
if (running) {
if (!ingestJobPipeline.isCancelled()) {
if (!ingestJobExecutor.isCancelled()) {
pauseIfScheduled();
if (ingestJobPipeline.isCancelled()) {
if (ingestJobExecutor.isCancelled()) {
return errors;
}
try {
prepareForTask(task);
} catch (IngestTaskPipelineException ex) {
} catch (IngestPipelineException ex) {
errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS
return errors;
}
for (PipelineModule<T> module : modules) {
pauseIfScheduled();
if (ingestJobPipeline.isCancelled()) {
if (ingestJobExecutor.isCancelled()) {
break;
}
try {
currentModule = module;
currentModule.setProcessingStartTime();
module.executeTask(ingestJobPipeline, task);
} catch (Throwable ex) {
module.process(ingestJobExecutor, task);
} catch (Throwable ex) { // Catch-all exception firewall
/*
* A catch-all exception firewall. Note that a runtime
* exception from a single module does not stop
* Note that an exception from a module does not stop
* processing of the task by the other modules in the
* pipeline.
*/
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
if (ingestJobPipeline.isCancelled()) {
if (ingestJobExecutor.isCancelled()) {
break;
}
}
}
try {
cleanUpAfterTask(task);
} catch (IngestTaskPipelineException ex) {
} catch (IngestPipelineException ex) {
errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS
}
} else {
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline not started or shut down"))); //NON-NLS
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline not started or shut down"))); //NON-NLS
}
currentModule = null;
return errors;
}
/**
* Pauses task execution if ingest has been configured to be paused weekly
* at a specified time for a specified duration.
* Pauses this pipeline if ingest has been configured to be paused weekly at
* a specified time, for a specified duration. A pipeline can only be paused
* between calls to module process() methods, i.e., the individual modules
* themselves cannot be paused in the middle of processing a task.
*/
private void pauseIfScheduled() {
if (ScheduledIngestPauseSettings.getPauseEnabled() == true) {
@ -278,7 +276,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
*/
LocalDateTime timeNow = LocalDateTime.now();
if ((timeNow.equals(pauseStart) || timeNow.isAfter(pauseStart)) && timeNow.isBefore(pauseEnd)) {
ingestJobPipeline.registerPausedIngestThread(Thread.currentThread());
ingestJobExecutor.registerPausedIngestThread(Thread.currentThread());
try {
long timeRemainingMillis = ChronoUnit.MILLIS.between(timeNow, pauseEnd);
logger.log(Level.INFO, String.format("%s pausing at %s for ~%d minutes", Thread.currentThread().getName(), LocalDateTime.now(), TimeUnit.MILLISECONDS.toMinutes(timeRemainingMillis)));
@ -287,27 +285,27 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} catch (InterruptedException notLogged) {
logger.log(Level.INFO, String.format("%s resuming at %s due to sleep interrupt (ingest job canceled)", Thread.currentThread().getName(), LocalDateTime.now()));
} finally {
ingestJobPipeline.unregisterPausedIngestThread(Thread.currentThread());
ingestJobExecutor.unregisterPausedIngestThread(Thread.currentThread());
}
}
}
}
/**
* Does any task type specific preparation required before executing an
* Does any task-type-specific preparation required before performing an
* ingest task.
*
* @param task The task.
*
* @throws IngestTaskPipelineException Thrown if there is an error preparing
* to execute the task.
* @throws IngestPipelineException Thrown if there is an error preparing to
* perform the task.
*/
abstract void prepareForTask(T task) throws IngestTaskPipelineException;
abstract void prepareForTask(T task) throws IngestPipelineException;
/**
* Gets the currently running ingest module.
*
* @return The module, possibly null if no module is currently running.
* @return The module, possibly null, if no module is currently running.
*/
PipelineModule<T> getCurrentlyRunningModule() {
return currentModule;
@ -345,22 +343,19 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}
/**
* Does any task type specific clean up required after executing an ingest
* Does any task-type-specific clean up required after performing an ingest
* task.
*
* @param task The task.
*
* @throws IngestTaskPipelineException Thrown if there is an error cleaning
* up after performing the task.
* @throws IngestPipelineException Thrown if there is an error cleaning up
* after performing the task.
*/
abstract void cleanUpAfterTask(T task) throws IngestTaskPipelineException;
abstract void cleanUpAfterTask(T task) throws IngestPipelineException;
/**
* An abstract superclass for a decorator that adds ingest infrastructure
* operations to an ingest module.
*
* IMPORTANT: Subclasses of IngestTaskPipeline need to implement a
* specialization this class
* An abstract superclass for an ingest module decorator that adds ingest
* infrastructure operations to an ingest module.
*/
static abstract class PipelineModule<T extends IngestTask> implements IngestModule {
@ -369,16 +364,17 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
private volatile Date processingStartTime;
/**
* Constructs an instance of an abstract superclass for a decorator that
* adds ingest infrastructure operations to an ingest module.
* Constructs an instance of an abstract superclass for an ingest module
* decorator that adds ingest infrastructure operations to an ingest
* module.
*
* @param module The ingest module to be wrapped.
* @param module The ingest module to be decorated.
* @param displayName The display name for the module.
*/
PipelineModule(IngestModule module, String displayName) {
this.module = module;
this.displayName = displayName;
this.processingStartTime = new Date();
processingStartTime = new Date();
}
/**
@ -410,8 +406,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
/**
* Gets the processing start time for the decorated module.
*
* @return The start time, will be null if the module has not started
* processing the data source yet.
* @return The start time, not valid if setProcessingStartTime() has not
* been called first.
*/
Date getProcessingStartTime() {
return new Date(processingStartTime.getTime());
@ -423,17 +419,17 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}
/**
* Executes an ingest task using the process() method of the decorated
* Performs an ingest task using the process() method of the decorated
* module.
*
* @param ingestJobPipeline The ingest job pipeline that owns the ingest
* task pipeline this module belongs to.
* @param task The task to execute.
* @param ingestJobExecutor The ingest job executor that owns the ingest
* pipeline to which this module belongs.
* @param task The task to perform.
*
* @throws IngestModuleException Exception thrown if there is an error
* performing the task.
*/
abstract void executeTask(IngestJobPipeline ingestJobPipeline, T task) throws IngestModuleException;
abstract void process(IngestJobExecutor ingestJobExecutor, T task) throws IngestModuleException;
@Override
public void shutDown() {
@ -443,28 +439,28 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}
/**
* An exception thrown by an ingest task pipeline.
* An exception thrown by an ingest pipeline.
*/
public static class IngestTaskPipelineException extends Exception {
static class IngestPipelineException extends Exception {
private static final long serialVersionUID = 1L;
/**
* Constructs an exception to be thrown by an ingest task pipeline.
* Constructs an exception to be thrown by an ingest pipeline.
*
* @param message The exception message.
*/
public IngestTaskPipelineException(String message) {
IngestPipelineException(String message) {
super(message);
}
/**
* Constructs an exception to be thrown by an ingest task pipeline.
* Constructs an exception to be thrown by an ingest pipeline.
*
* @param message The exception message.
* @param cause The exception cause.
*/
public IngestTaskPipelineException(String message, Throwable cause) {
IngestPipelineException(String message, Throwable cause) {
super(message, cause);
}
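
To make the renamed contract concrete, here is a hedged sketch of a subclass in the same package; the class name, the file-module check, and the empty prepare/clean-up bodies are illustrative, not the shipped FileIngestPipeline.

final class ExampleFilePipeline extends IngestPipeline<FileIngestTask> {

    ExampleFilePipeline(IngestJobExecutor executor, List<IngestModuleTemplate> templates) {
        super(executor, templates);
    }

    @Override
    Optional<PipelineModule<FileIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
        // Accept only templates that can produce file-level modules; other
        // template types belong in other pipelines for the same job.
        if (!template.isFileIngestModuleTemplate()) {
            return Optional.empty();
        }
        FileIngestModule module = template.createFileIngestModule();
        return Optional.of(new PipelineModule<FileIngestTask>(module, template.getModuleName()) {
            @Override
            void process(IngestJobExecutor executor, FileIngestTask task) throws IngestModuleException {
                // A real decorator would look up the task's file and pass it
                // to the decorated module's process() method.
            }
        });
    }

    @Override
    void prepareForTask(FileIngestTask task) throws IngestPipelineException {
        // No per-task setup in this sketch.
    }

    @Override
    void cleanUpAfterTask(FileIngestTask task) throws IngestPipelineException {
        // No per-task teardown in this sketch.
    }
}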

View File

@ -33,7 +33,7 @@ import org.sleuthkit.datamodel.SleuthkitCase;
*/
public final class IngestServices {
private static Logger logger = Logger.getLogger(IngestServices.class.getName());
private final static Logger logger = Logger.getLogger(IngestServices.class.getName());
private static IngestServices instance = null;
/**
@ -115,7 +115,7 @@ public final class IngestServices {
public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
try {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName());
blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName(), null);
} catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Failed to post artifacts", ex);
}
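
The only caller-visible change here is the extra argument to postArtifacts(). A hedged module-side sketch (newArtifacts and the module name are placeholders; passing the IngestJobContext's job ID, rather than the null used above, is an assumption about what the new parameter carries):

// In an ingest module, after creating artifacts; exception handling as above.
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
blackboard.postArtifacts(newArtifacts, "ExampleModule", context.getJobId()); // context: the module's IngestJobContext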

View File

@ -22,37 +22,35 @@ import org.sleuthkit.datamodel.Content;
/**
* An ingest task that will be executed by an ingest thread using a given ingest
* job pipeline. Three examples of concrete types of ingest tasks are tasks to
* job executor. Three examples of concrete types of ingest tasks are tasks to
* analyze a data source, tasks to analyze the files in a data source, and tasks
* that analyze data artifacts.
* to analyze data artifacts.
*/
abstract class IngestTask {
private final static long NOT_SET = Long.MIN_VALUE;
private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
private long threadId;
/**
* Constructs an ingest task that will be executed by an ingest thread using
* a given ingest job pipeline. Three examples of concrete types of ingest
* tasks are tasks to analyze a data source, tasks to analyze the files in a
* data source, and tasks that analyze data artifacts.
* a given ingest job executor.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobExecutor The ingest job executor to use to execute the
* task.
*/
IngestTask(IngestJobPipeline ingestJobPipeline) {
this.ingestJobPipeline = ingestJobPipeline;
IngestTask(IngestJobExecutor ingestJobExecutor) {
this.ingestJobExecutor = ingestJobExecutor;
threadId = NOT_SET;
}
/**
* Gets the ingest job pipeline used to complete this task.
* Gets the ingest job executor to use to execute this task.
*
* @return The ingest job pipeline.
* @return The ingest job executor.
*/
IngestJobPipeline getIngestJobPipeline() {
return ingestJobPipeline;
IngestJobExecutor getIngestJobExecutor() {
return ingestJobExecutor;
}
/**
@ -61,7 +59,7 @@ abstract class IngestTask {
* @return The data source.
*/
Content getDataSource() {
return getIngestJobPipeline().getDataSource();
return getIngestJobExecutor().getDataSource();
}
/**
@ -84,8 +82,8 @@ abstract class IngestTask {
/**
* Records the ingest thread ID of the calling thread and executes this task
* using the ingest job pipeline specified when the task was created. The
* implementation of the method should simple call
* using the ingest job executor specified when the task was created. The
* implementation of the method should simply call
* super.setThreadId(threadId) and getIngestJobExecutor().process(this).
*
* @param threadId The numeric ID of the ingest thread executing this task.
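
A hypothetical task type following that contract (the shipped DataSourceIngestTask and FileIngestTask are analogous; the process() dispatch name is taken from the Javadoc above):

final class ExampleIngestTask extends IngestTask {

    ExampleIngestTask(IngestJobExecutor ingestJobExecutor) {
        super(ingestJobExecutor);
    }

    @Override
    void execute(long threadId) {
        // Record the calling thread, then hand the task back to the executor
        // for processing by the appropriate ingest module pipeline.
        super.setThreadId(threadId);
        getIngestJobExecutor().process(this);
    }
}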

View File

@ -138,7 +138,7 @@ final class IngestTasksScheduler {
* task to the pipeline for processing by the
* pipeline's ingest modules.
*/
synchronized void scheduleIngestTasks(IngestJobPipeline ingestPipeline) {
synchronized void scheduleIngestTasks(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) {
if (ingestPipeline.hasDataSourceIngestModules()) {
scheduleDataSourceIngestTask(ingestPipeline);
@ -163,7 +163,7 @@ final class IngestTasksScheduler {
* task to the pipeline for processing by the
* pipeline's ingest modules.
*/
synchronized void scheduleDataSourceIngestTask(IngestJobPipeline ingestPipeline) {
synchronized void scheduleDataSourceIngestTask(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) {
DataSourceIngestTask task = new DataSourceIngestTask(ingestPipeline);
try {
@ -190,7 +190,7 @@ final class IngestTasksScheduler {
* empty, then all of the files from the data source
* are candidates for scheduling.
*/
synchronized void scheduleFileIngestTasks(IngestJobPipeline ingestPipeline, Collection<AbstractFile> files) {
synchronized void scheduleFileIngestTasks(IngestJobExecutor ingestPipeline, Collection<AbstractFile> files) {
if (!ingestPipeline.isCancelled()) {
Collection<AbstractFile> candidateFiles;
if (files.isEmpty()) {
@ -220,7 +220,7 @@ final class IngestTasksScheduler {
* processing by the pipeline's ingest modules.
* @param fileIds A list of file object IDs for the streamed files.
*/
synchronized void scheduleStreamedFileIngestTasks(IngestJobPipeline ingestPipeline, List<Long> fileIds) {
synchronized void scheduleStreamedFileIngestTasks(IngestJobExecutor ingestPipeline, List<Long> fileIds) {
if (!ingestPipeline.isCancelled()) {
for (long id : fileIds) {
/*
@ -252,7 +252,7 @@ final class IngestTasksScheduler {
* processing by the pipeline's ingest modules.
* @param files The files.
*/
synchronized void fastTrackFileIngestTasks(IngestJobPipeline ingestPipeline, Collection<AbstractFile> files) {
synchronized void fastTrackFileIngestTasks(IngestJobExecutor ingestPipeline, Collection<AbstractFile> files) {
if (!ingestPipeline.isCancelled()) {
/*
* Put the files directly into the queue for the file ingest
@ -290,7 +290,7 @@ final class IngestTasksScheduler {
* target Content of the task to the pipeline for
* processing by the pipeline's ingest modules.
*/
synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline) {
synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) {
Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
try {
@ -318,7 +318,7 @@ final class IngestTasksScheduler {
* source; if empty, then all of the data artifacts
* from the data source will be scheduled.
*/
synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline, List<DataArtifact> artifacts) {
synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline, List<DataArtifact> artifacts) {
if (!ingestPipeline.isCancelled()) {
for (DataArtifact artifact : artifacts) {
DataArtifactIngestTask task = new DataArtifactIngestTask(ingestPipeline, artifact);
@ -373,7 +373,7 @@ final class IngestTasksScheduler {
*
* @return True or false.
*/
synchronized boolean currentTasksAreCompleted(IngestJobPipeline ingestPipeline) {
synchronized boolean currentTasksAreCompleted(IngestJobExecutor ingestPipeline) {
long pipelineId = ingestPipeline.getIngestJobId();
return !(dataSourceIngestTasksQueue.hasTasksForJob(pipelineId)
|| hasTasksForJob(topLevelFileIngestTasksQueue, pipelineId)
@ -402,7 +402,7 @@ final class IngestTasksScheduler {
*
* @param ingestJobPipeline The ingest pipeline for the job.
*/
synchronized void cancelPendingFileTasksForIngestJob(IngestJobPipeline ingestJobPipeline) {
synchronized void cancelPendingFileTasksForIngestJob(IngestJobExecutor ingestJobPipeline) {
long jobId = ingestJobPipeline.getIngestJobId();
removeTasksForJob(topLevelFileIngestTasksQueue, jobId);
removeTasksForJob(batchedFileIngestTasksQueue, jobId);
@ -549,7 +549,7 @@ final class IngestTasksScheduler {
for (Content child : file.getChildren()) {
if (child instanceof AbstractFile) {
AbstractFile childFile = (AbstractFile) child;
FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobPipeline(), childFile);
FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobExecutor(), childFile);
if (childFile.hasChildren()) {
batchedFileIngestTasksQueue.add(childTask);
} else if (shouldEnqueueFileTask(childTask)) {
@ -668,7 +668,7 @@ final class IngestTasksScheduler {
private static boolean shouldBeCarved(final FileIngestTask task) {
try {
AbstractFile file = task.getFile();
return task.getIngestJobPipeline().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
return task.getIngestJobExecutor().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
} catch (TskCoreException ex) {
return false;
}
@ -685,7 +685,7 @@ final class IngestTasksScheduler {
private static boolean fileAcceptedByFilter(final FileIngestTask task) {
try {
AbstractFile file = task.getFile();
return !(task.getIngestJobPipeline().getFileIngestFilter().fileIsMemberOf(file) == null);
return !(task.getIngestJobExecutor().getFileIngestFilter().fileIsMemberOf(file) == null);
} catch (TskCoreException ex) {
return false;
}
@ -702,7 +702,7 @@ final class IngestTasksScheduler {
*/
synchronized private static boolean hasTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) {
for (IngestTask task : tasks) {
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) {
if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
return true;
}
}
@ -720,7 +720,7 @@ final class IngestTasksScheduler {
Iterator<? extends IngestTask> iterator = tasks.iterator();
while (iterator.hasNext()) {
IngestTask task = iterator.next();
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) {
if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
iterator.remove();
}
}
@ -738,7 +738,7 @@ final class IngestTasksScheduler {
private static int countTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) {
int count = 0;
for (IngestTask task : tasks) {
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) {
if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
count++;
}
}
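
All of these entry points key their queues by ingest job ID, which is what lets hasTasksForJob(), removeTasksForJob(), and countTasksForJob() operate on queues shared across jobs. A sketch of the calling pattern implied by this diff (variable names are illustrative; getInstance() is assumed to be the scheduler's singleton accessor):

IngestTasksScheduler scheduler = IngestTasksScheduler.getInstance();
scheduler.scheduleIngestTasks(ingestJobExecutor); // data source, file, and artifact tasks, as configured
// ... ingest threads drain the queues and run the tasks ...
if (scheduler.currentTasksAreCompleted(ingestJobExecutor)) {
    // All queued and in-progress tasks for this job are done.
}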

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2015-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -35,8 +35,9 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Event published when new data is posted to the blackboard of a case. The
* "old" value is a legacy ModuleDataEvent object. The "new" value is null.
* An event published when a new artifact (data artifact or analysis result) is
* posted to the blackboard. The "old" value is a legacy ModuleDataEvent object.
* The "new" value is null.
*/
public final class BlackboardPostEvent extends AutopsyEvent implements Serializable {
@ -45,14 +46,15 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
private transient ModuleDataEvent eventData;
/**
* Constructs an event to be published when new content is added to a case
* or there is a change a recorded attribute of existing content.
* Constructs an event published when a new artifact (data artifact or
* analysis result) is posted to the blackboard. The "old" value is a legacy
* ModuleDataEvent object. The "new" value is null.
*
* @param eventData A ModuleDataEvent object containing the data associated
* with the blackboard post.
*/
public BlackboardPostEvent(ModuleDataEvent eventData) {
/**
/*
* Putting a serializable data holding object into oldValue to allow for
* lazy loading of the ModuleDataEvent object for remote events. This
* bypasses the issues related to the serialization and de-serialization
@ -63,9 +65,9 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
IngestManager.IngestModuleEvent.DATA_ADDED.toString(),
new SerializableEventData(eventData.getModuleName(), eventData.getBlackboardArtifactType(), eventData.getArtifacts() != null
? eventData.getArtifacts()
.stream()
.map(BlackboardArtifact::getArtifactID)
.collect(Collectors.toList()) : Collections.emptyList()),
.stream()
.map(BlackboardArtifact::getArtifactID)
.collect(Collectors.toList()) : Collections.emptyList()),
null
);
this.eventData = eventData;
@ -78,13 +80,13 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
*/
@Override
public Object getOldValue() {
/**
* The eventData field is set in the constructor, but it is transient so
* it will become null when the event is serialized for publication over
* a network. Doing a lazy load of the ModuleDataEvent object bypasses
* the issues related to the serialization and de-serialization of
* BlackboardArtifact objects and may also save database round trips
* from other nodes since subscribers to this event are often not
/*
* The eventData field is set in the constructor, but it is transient,
* so it will become null when the event is serialized for publication
* over a network. Doing a lazy load of the ModuleDataEvent object
* bypasses the issues related to the serialization and de-serialization
* of BlackboardArtifact objects and may also save database round trips
* from other hosts since subscribers to this event are often not
* interested in the event data.
*/
if (null != eventData) {
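return eventData;
}
/*
 * Sketch of the lazy reload branch that follows (hedged: the
 * SerializableEventData accessor names are assumptions, and exception
 * handling is omitted; the shipped method also caches the rebuilt
 * ModuleDataEvent back into eventData, as sketched here).
 */
SerializableEventData data = (SerializableEventData) super.getOldValue();
List<BlackboardArtifact> artifacts = new ArrayList<>();
for (Long id : data.getArtifactIds()) { // assumed accessor
    artifacts.add(Case.getCurrentCase().getSleuthkitCase().getBlackboardArtifact(id));
}
eventData = new ModuleDataEvent(data.getModuleName(), data.getArtifactType(), artifacts); // assumed accessors
return eventData;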

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -513,8 +513,7 @@ final class AddLogicalImageTask implements Runnable {
private void postArtifacts(List<BlackboardArtifact> artifacts) {
try {
// index the artifact for keyword search
blackboard.postArtifacts(artifacts, MODULE_NAME);
blackboard.postArtifacts(artifacts, MODULE_NAME, null);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS
}

View File

@ -0,0 +1,62 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.List;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
/**
* A result row for a BlackboardArtifactTag.
*/
public final class BlackboardArtifactTagsRowDTO extends BaseRowDTO {
private static final String TYPE_ID = "ARTIFACT_TAG";
private final BlackboardArtifactTag tag;
public BlackboardArtifactTagsRowDTO(BlackboardArtifactTag tag, List<Object> cellValues, long id) {
super(cellValues, TYPE_ID, id);
this.tag = tag;
}
public static String getTypeIdForClass() {
return TYPE_ID;
}
/**
* Returns the tag for this result row.
*
* @return The tag for this result row.
*/
public BlackboardArtifactTag getTag() {
return tag;
}
/**
* Returns the tag's display name.
*
* @return The display name for this tag.
*/
public String getDisplayName() {
return getCellValues().size() > 0
? getCellValues().get(0).toString()
: "";
}
}
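
Illustrative construction of one of these rows, assuming a BlackboardArtifactTag in scope; the cell ordering (display name first) is what getDisplayName() above relies on, and the particular cells chosen here are a guess at a plausible subset:

List<Object> cellValues = Arrays.asList(
        tag.getContent().getName(), // first cell doubles as the display name
        tag.getComment(),
        tag.getUserName());
BlackboardArtifactTagsRowDTO row = new BlackboardArtifactTagsRowDTO(tag, cellValues, tag.getId());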

View File

@ -0,0 +1,62 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.List;
import org.sleuthkit.datamodel.ContentTag;
/**
* A result row for a ContentTag.
*/
public class ContentTagsRowDTO extends BaseRowDTO {
private static final String TYPE_ID = "CONTENT_TAG";
private final ContentTag tag;
public ContentTagsRowDTO(ContentTag tag, List<Object> cellValues, long id) {
super(cellValues, TYPE_ID, id);
this.tag = tag;
}
public static String getTypeIdForClass() {
return TYPE_ID;
}
/**
* Return the tag for this result row.
*
* @return The tag for this row.
*/
public ContentTag getTag() {
return tag;
}
/**
* Returns the tag's display name.
*
* @return The display name for this tag.
*/
public String getDisplayName() {
return getCellValues().size() > 0
? getCellValues().get(0).toString()
: "";
}
}

View File

@ -137,7 +137,7 @@ class FileSystemColumnUtils {
getColumnKey(Bundle.FileSystemColumnUtils_imageColumns_devID())
);
// Not used yet - Note that Hosts aren't content and will not be combined with other types, so we include the name here
// Note that Hosts aren't content and will not be combined with other types, so we include the name here
private static final List<ColumnKey> HOST_COLUMNS = Arrays.asList(
NAME_COLUMN
);
@ -230,6 +230,15 @@ class FileSystemColumnUtils {
return colKeys;
}
/**
* Get the column keys for a Host.
*
* @return The column keys.
*/
static List<ColumnKey> getColumnKeysForHost() {
return Arrays.asList(NAME_COLUMN);
}
/**
* Get the cell values for a given content object.
*

View File

@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.mainui.datamodel;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
@ -33,6 +32,7 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Person;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@ -93,6 +93,45 @@ public class FileSystemDAO {
return fetchContentForTable(cacheKey, contentForTable, parentName);
}
private BaseSearchResultsDTO fetchHostsForTable(SearchParams<FileSystemPersonSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
Long objectId = cacheKey.getParamData().getPersonObjectId();
List<Host> hostsForTable = new ArrayList<>();
String parentName = "";
if (objectId != null) {
Optional<Person> person = skCase.getPersonManager().getPerson(objectId);
if (person.isPresent()) {
parentName = person.get().getName();
hostsForTable.addAll(skCase.getPersonManager().getHostsForPerson(person.get()));
} else {
throw new TskCoreException("Error loading person with ID " + objectId);
}
} else {
hostsForTable.addAll(skCase.getPersonManager().getHostsWithoutPersons());
}
Stream<Host> pagedHostsStream = hostsForTable.stream()
.sorted(Comparator.comparing((host) -> host.getHostId()))
.skip(cacheKey.getStartItem());
if (cacheKey.getMaxResultsCount() != null) {
pagedHostsStream = pagedHostsStream.limit(cacheKey.getMaxResultsCount());
}
List<Host> pagedHosts = pagedHostsStream.collect(Collectors.toList());
List<ColumnKey> columnKeys = FileSystemColumnUtils.getColumnKeysForHost();
List<RowDTO> rows = new ArrayList<>();
for (Host host : pagedHosts) {
List<Object> cellValues = FileSystemColumnUtils.getCellValuesForHost(host);
rows.add(new BaseRowDTO(cellValues, FILE_SYSTEM_TYPE_ID, host.getHostId()));
}
return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), hostsForTable.size());
}
private BaseSearchResultsDTO fetchContentForTable(SearchParams<?> cacheKey, List<Content> contentForTable,
String parentName) throws NoCurrentCaseException, TskCoreException {
@ -116,7 +155,7 @@ public class FileSystemDAO {
* @param contentObjects The content objects.
* @param searchParams The search parameters including the paging.
*
* @return The list of paged artifacts.
* @return The list of paged content.
*/
private List<Content> getPaged(List<? extends Content> contentObjects, SearchParams<?> searchParams) {
Stream<? extends Content> pagedArtsStream = contentObjects.stream()
@ -149,4 +188,14 @@ public class FileSystemDAO {
return searchParamsCache.get(searchParams, () -> fetchContentForTableFromHost(searchParams));
}
public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
SearchParams<FileSystemPersonSearchParam> searchParams = new SearchParams<>(objectKey, startItem, maxCount);
if (hardRefresh) {
searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams));
}
}
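
Callers page through hosts the same way as the other DAO methods: build a search param (a null person object ID means hosts without a person), pick a start item and page size, and let the cache absorb repeat queries unless hardRefresh is set. A hedged usage sketch (fileSystemDao stands in for however the DAO instance is obtained):

FileSystemPersonSearchParam param = new FileSystemPersonSearchParam(null); // hosts with no person
BaseSearchResultsDTO firstPage = fileSystemDao.getHostsForTable(param, 0, 50L, false);
BaseSearchResultsDTO refreshed = fileSystemDao.getHostsForTable(param, 0, 50L, true); // invalidates, then refetches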

View File

@ -0,0 +1,66 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
/**
* Key for person object in order to retrieve data from DAO.
*/
public class FileSystemPersonSearchParam {
private final Long personObjectId;
/**
* Create search param.
*
* @param personObjectId May be null to fetch hosts not associated with a Person.
*/
public FileSystemPersonSearchParam(Long personObjectId) {
this.personObjectId = personObjectId;
}
public Long getPersonObjectId() {
return personObjectId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 67 * hash + Objects.hashCode(this.personObjectId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileSystemPersonSearchParam other = (FileSystemPersonSearchParam) obj;
if (!Objects.equals(this.personObjectId, other.personObjectId)) {
return false;
}
return true;
}
}

View File

@ -81,9 +81,6 @@ public class TagsDAO {
private static final String USER_NAME_PROPERTY = "user.name"; //NON-NLS
private static final String FILE_TAG_TYPE_ID = "FILE_TAG";
private static final String RESULT_TAG_TYPE_ID = "RESULT_TAG";
private static final List<ColumnKey> FILE_TAG_COLUMNS = Arrays.asList(
getFileColumnKey(Bundle.TagsDAO_fileColumns_nameColLbl()),
getFileColumnKey(Bundle.TagsDAO_fileColumns_originalName()), // GVDTODO handle translation
@ -218,13 +215,13 @@ public class TagsDAO {
blackboardTag.getComment(),
blackboardTag.getUserName());
fileRows.add(new BaseRowDTO(
fileRows.add(new BlackboardArtifactTagsRowDTO(
blackboardTag,
cellValues,
RESULT_TAG_TYPE_ID,
blackboardTag.getId()));
}
return new BaseSearchResultsDTO(RESULT_TAG_TYPE_ID, Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size());
return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size());
}
private SearchResultsDTO fetchFileTags(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
@ -271,13 +268,13 @@ public class TagsDAO {
file != null ? StringUtils.defaultString(file.getMd5Hash()) : "",
contentTag.getUserName());
fileRows.add(new BaseRowDTO(
fileRows.add(new ContentTagsRowDTO(
contentTag,
cellValues,
FILE_TAG_TYPE_ID,
file.getId()));
}
return new BaseSearchResultsDTO(FILE_TAG_TYPE_ID, Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size());
return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size());
}
/**

View File

@ -18,14 +18,20 @@
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.util.Optional;
import org.openide.util.Lookup;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.AnalysisResultItem;
import org.sleuthkit.autopsy.datamodel.FileTypeExtensions;
import org.sleuthkit.autopsy.datamodel.utils.IconsUtil;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultTableSearchResultsDTO;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Node to display an AnalysisResult.
@ -70,4 +76,32 @@ public class AnalysisResultNode extends ArtifactNode<AnalysisResult, AnalysisRes
return Lookups.fixed(row.getAnalysisResult(), resultItem, row.getSrcContent());
}
@Override
public boolean supportsContentTagAction() {
return getSourceContent().isPresent() && getSourceContent().get() instanceof AbstractFile;
}
@Override
public Optional<AbstractFile> getExtractArchiveWithPasswordActionFile() {
Optional<Content> optionalSourceContent = getSourceContent();
// GVDTODO: HANDLE THIS ACTION IN A BETTER WAY!-----
// See JIRA-8099
boolean encryptionDetected = false;
if (optionalSourceContent.isPresent()) {
if (optionalSourceContent.get() instanceof AbstractFile) {
AbstractFile file = (AbstractFile) optionalSourceContent.get();
boolean isArchive = FileTypeExtensions.getArchiveExtensions().contains("." + file.getNameExtension().toLowerCase());
try {
encryptionDetected = isArchive && file.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED).size() > 0;
} catch (TskCoreException ex) {
// TODO
}
if (encryptionDetected) {
return Optional.of(file);
}
}
}
return Optional.empty();
}
}

View File

@ -18,46 +18,24 @@
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.Optional;
import javax.swing.Action;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.util.Utilities;
import org.sleuthkit.autopsy.actions.AddBlackboardArtifactTagAction;
import org.sleuthkit.autopsy.actions.AddContentTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileBlackboardArtifactTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
import org.sleuthkit.autopsy.actions.ViewArtifactAction;
import org.sleuthkit.autopsy.actions.ViewOsAccountAction;
import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint;
import org.sleuthkit.autopsy.datamodel.BlackboardArtifactItem;
import org.sleuthkit.autopsy.datamodel.DataModelActionsFactory;
import org.sleuthkit.autopsy.datamodel.DirectoryNode;
import org.sleuthkit.autopsy.datamodel.LayoutFileNode;
import org.sleuthkit.autopsy.datamodel.LocalDirectoryNode;
import org.sleuthkit.autopsy.datamodel.LocalFileNode;
import org.sleuthkit.autopsy.datamodel.SlackFileNode;
import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode;
import org.sleuthkit.autopsy.directorytree.ExportCSVAction;
import org.sleuthkit.autopsy.directorytree.ExternalViewerAction;
import org.sleuthkit.autopsy.directorytree.ExternalViewerShortcutAction;
import org.sleuthkit.autopsy.directorytree.ExtractAction;
import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
import org.sleuthkit.autopsy.directorytree.ViewContextAction;
import org.sleuthkit.autopsy.mainui.datamodel.ArtifactRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ColumnKey;
import org.sleuthkit.autopsy.timeline.actions.ViewArtifactInTimelineAction;
import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
@ -69,15 +47,15 @@ import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.LocalDirectory;
import org.sleuthkit.datamodel.LocalFile;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.Report;
import org.sleuthkit.datamodel.SlackFile;
import org.sleuthkit.datamodel.VirtualDirectory;
public abstract class ArtifactNode<T extends BlackboardArtifact, R extends ArtifactRowDTO<T>> extends AbstractNode {
public abstract class ArtifactNode<T extends BlackboardArtifact, R extends ArtifactRowDTO<T>> extends AbstractNode implements ActionContext {
private final R rowData;
private final BlackboardArtifact.Type artifactType;
private final List<ColumnKey> columns;
private Node parentFileNode;
ArtifactNode(R rowData, List<ColumnKey> columns, BlackboardArtifact.Type artifactType, Lookup lookup, String iconPath) {
super(Children.LEAF, lookup);
@ -87,6 +65,82 @@ public abstract class ArtifactNode<T extends BlackboardArtifact, R extends Artif
setupNodeDisplay(iconPath);
}
@Override
public Optional<Content> getSourceContent() {
return Optional.ofNullable(rowData.getSrcContent());
}
@Override
public Optional<AbstractFile> getLinkedFile() {
return Optional.ofNullable((AbstractFile) rowData.getLinkedFile());
}
@Override
public boolean supportsViewInTimeline() {
return rowData.isTimelineSupported();
}
@Override
public Optional<BlackboardArtifact> getArtifactForTimeline() {
return Optional.ofNullable(rowData.getArtifact());
}
@Override
public boolean supportsAssociatedFileActions() {
return getLinkedFile().isPresent();
}
@Override
public boolean supportsSourceContentActions() {
Content sourceContent = rowData.getSrcContent();
return (sourceContent instanceof DataArtifact)
|| (sourceContent instanceof OsAccount)
|| (sourceContent instanceof AbstractFile || (rowData.getArtifact() instanceof DataArtifact));
}
@Override
public Optional<AbstractFile> getSourceFileForTimelineAction() {
return Optional.ofNullable(rowData.getSrcContent() instanceof AbstractFile ? (AbstractFile) rowData.getSrcContent() : null);
}
@Override
public Optional<BlackboardArtifact> getArtifact() {
return Optional.of(rowData.getArtifact());
}
@Override
public boolean supportsSourceContentViewerActions() {
return rowData.getSrcContent() != null;
}
@Override
public Optional<Node> getNewWindowActionNode() {
return Optional.ofNullable(getParentFileNode());
}
@Override
public Optional<Node> getExternalViewerActionNode() {
return Optional.ofNullable(getParentFileNode());
}
@Override
public boolean supportsExtractActions() {
return rowData.getSrcContent() instanceof AbstractFile;
}
@Override
public boolean supportsArtifactTagAction() {
return true;
}
private Node getParentFileNode() {
if (parentFileNode == null) {
parentFileNode = getParentFileNode(rowData.getSrcContent());
}
return parentFileNode;
}
protected void setupNodeDisplay(String iconPath) {
// use first cell value for display name
String displayName = rowData.getCellValues().size() > 0
@ -104,182 +158,9 @@ public abstract class ArtifactNode<T extends BlackboardArtifact, R extends Artif
return ContentNodeUtil.setSheet(super.createSheet(), columns, rowData.getCellValues());
}
/**
* Returns a list of non null actions from the given possibly null options.
*
* @param items The items to purge of null items.
*
* @return The list of non-null actions.
*/
private List<Action> getNonNull(Action... items) {
return Stream.of(items)
.filter(i -> i != null)
.collect(Collectors.toList());
}
@Override
public Action[] getActions(boolean context) {
// groupings of actions where each group will be separated by a divider
List<List<Action>> actionsLists = new ArrayList<>();
T artifact = rowData.getArtifact();
Content srcContent = rowData.getSrcContent();
// view artifact in timeline
actionsLists.add(getNonNull(
getTimelineArtifactAction(artifact, rowData.isTimelineSupported())
));
// view associated file (TSK_PATH_ID attr) in directory and timeline
AbstractFile associatedFile = rowData.getLinkedFile() instanceof AbstractFile
? (AbstractFile) rowData.getLinkedFile()
: null;
actionsLists.add(getAssociatedFileActions(associatedFile, this.artifactType));
// view source content in directory and timeline
actionsLists.add(getNonNull(
getViewSrcContentAction(artifact, srcContent),
getTimelineSrcContentAction(srcContent)
));
// menu options for artifact with report parent
if (srcContent instanceof Report) {
actionsLists.add(DataModelActionsFactory.getActions(srcContent, false));
}
Node parentFileNode = getParentFileNode(srcContent);
int selectedFileCount = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class).size();
int selectedArtifactCount = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifactItem.class).size();
// view source content if source content is some sort of file
actionsLists.add(getSrcContentViewerActions(parentFileNode, selectedFileCount));
// extract / export if source content is some sort of file
if (parentFileNode != null) {
actionsLists.add(Arrays.asList(ExtractAction.getInstance(), ExportCSVAction.getInstance()));
}
// file and result tagging
actionsLists.add(getTagActions(parentFileNode != null, artifact, selectedFileCount, selectedArtifactCount));
// menu extension items (i.e. add to central repository)
actionsLists.add(ContextMenuExtensionPoint.getActions());
// netbeans default items (i.e. properties)
actionsLists.add(Arrays.asList(super.getActions(context)));
return actionsLists.stream()
// remove any empty lists
.filter((lst) -> lst != null && !lst.isEmpty())
// add in null between each list group
.flatMap(lst -> Stream.concat(Stream.of((Action) null), lst.stream()))
// skip the first null
.skip(1)
.toArray(sz -> new Action[sz]);
}
/**
* Returns the name of the artifact based on the artifact type to be used
* with the associated file string in a right click menu.
*
* @param artifactType The artifact type.
*
* @return The artifact type name.
*/
@NbBundle.Messages({
"ArtifactNode_getAssociatedTypeStr_webCache=Cached File",
"ArtifactNode_getAssociatedTypeStr_webDownload=Downloaded File",
"ArtifactNode_getAssociatedTypeStr_associated=Associated File",})
private String getAssociatedTypeStr(BlackboardArtifact.Type artifactType) {
if (BlackboardArtifact.Type.TSK_WEB_CACHE.equals(artifactType)) {
return Bundle.ArtifactNode_getAssociatedTypeStr_webCache();
} else if (BlackboardArtifact.Type.TSK_WEB_DOWNLOAD.equals(artifactType)) {
return Bundle.ArtifactNode_getAssociatedTypeStr_webDownload();
} else {
return Bundle.ArtifactNode_getAssociatedTypeStr_associated();
}
}
/**
* Returns the name to represent the type of the content (file, data
* artifact, os account, item).
*
* @param content The content.
*
* @return The name of the type of content.
*/
@Messages({
"ArtifactNode_getViewSrcContentAction_type_File=File",
"ArtifactNode_getViewSrcContentAction_type_DataArtifact=Data Artifact",
"ArtifactNode_getViewSrcContentAction_type_OSAccount=OS Account",
"ArtifactNode_getViewSrcContentAction_type_unknown=Item"
})
private String getContentTypeStr(Content content) {
if (content instanceof AbstractFile) {
return Bundle.ArtifactNode_getViewSrcContentAction_type_File();
} else if (content instanceof DataArtifact) {
return Bundle.ArtifactNode_getViewSrcContentAction_type_DataArtifact();
} else if (content instanceof OsAccount) {
return Bundle.ArtifactNode_getViewSrcContentAction_type_OSAccount();
} else {
return Bundle.ArtifactNode_getViewSrcContentAction_type_unknown();
}
}
@Messages({
"# {0} - type",
"ArtifactNode_getAssociatedFileActions_viewAssociatedFileAction=View {0} in Directory",
"# {0} - type",
"ArtifactNode_getAssociatedFileActions_viewAssociatedFileInTimelineAction=View {0} in Timeline..."
})
private List<Action> getAssociatedFileActions(AbstractFile associatedFile, BlackboardArtifact.Type artifactType) {
if (associatedFile != null) {
return Arrays.asList(
new ViewContextAction(
Bundle.ArtifactNode_getAssociatedFileActions_viewAssociatedFileAction(
getAssociatedTypeStr(artifactType)),
associatedFile),
new ViewFileInTimelineAction(associatedFile,
Bundle.ArtifactNode_getAssociatedFileActions_viewAssociatedFileInTimelineAction(
getAssociatedTypeStr(artifactType)))
);
} else {
return Collections.emptyList();
}
}
/**
* Creates an action to navigate to src content in tree hierarchy.
*
* @param artifact The artifact.
* @param content The content.
*
* @return The action or null if no action derived.
*/
@NbBundle.Messages({
"# {0} - contentType",
"ArtifactNode_getSrcContentAction_actionDisplayName=View Source {0} in Directory"
})
private Action getViewSrcContentAction(BlackboardArtifact artifact, Content content) {
if (content instanceof DataArtifact) {
return new ViewArtifactAction(
(BlackboardArtifact) content,
Bundle.ArtifactNode_getSrcContentAction_actionDisplayName(
getContentTypeStr(content)));
} else if (content instanceof OsAccount) {
return new ViewOsAccountAction(
(OsAccount) content,
Bundle.ArtifactNode_getSrcContentAction_actionDisplayName(
getContentTypeStr(content)));
} else if (content instanceof AbstractFile || artifact instanceof DataArtifact) {
return new ViewContextAction(
Bundle.ArtifactNode_getSrcContentAction_actionDisplayName(
getContentTypeStr(content)),
content);
} else {
return null;
}
return ActionsFactory.getActions(this);
}
/**
@ -309,103 +190,4 @@ public abstract class ArtifactNode<T extends BlackboardArtifact, R extends Artif
return null;
}
}
/**
* Returns tag actions.
*
* @param hasSrcFile Whether or not the artifact has a source
* file.
* @param artifact This artifact.
* @param selectedFileCount The count of selected files.
* @param selectedArtifactCount The count of selected artifacts.
*
* @return The tag actions.
*/
private List<Action> getTagActions(boolean hasSrcFile, BlackboardArtifact artifact, int selectedFileCount, int selectedArtifactCount) {
List<Action> actionsList = new ArrayList<>();
// don't show AddContentTagAction for data artifacts.
if (hasSrcFile && !(artifact instanceof DataArtifact)) {
actionsList.add(AddContentTagAction.getInstance());
}
actionsList.add(AddBlackboardArtifactTagAction.getInstance());
// don't show DeleteFileContentTagAction for data artifacts.
if (hasSrcFile && (!(artifact instanceof DataArtifact)) && (selectedFileCount == 1)) {
actionsList.add(DeleteFileContentTagAction.getInstance());
}
if (selectedArtifactCount == 1) {
actionsList.add(DeleteFileBlackboardArtifactTagAction.getInstance());
}
return actionsList;
}
/**
* Returns actions to view src content in a different viewer or window.
*
* @param srcFileNode The source file node or null if no source file.
* @param selectedFileCount The number of selected files.
*
* @return The list of actions or an empty list.
*/
@NbBundle.Messages({
"ArtifactNode_getSrcContentViewerActions_viewInNewWin=View Item in New Window",
"ArtifactNode_getSrcContentViewerActions_openInExtViewer=Open in External Viewer Ctrl+E"
})
private List<Action> getSrcContentViewerActions(Node srcFileNode, int selectedFileCount) {
List<Action> actionsList = new ArrayList<>();
if (srcFileNode != null) {
actionsList.add(new NewWindowViewAction(Bundle.ArtifactNode_getSrcContentViewerActions_viewInNewWin(), srcFileNode));
if (selectedFileCount == 1) {
actionsList.add(new ExternalViewerAction(Bundle.ArtifactNode_getSrcContentViewerActions_openInExtViewer(), srcFileNode));
} else {
actionsList.add(ExternalViewerShortcutAction.getInstance());
}
}
return actionsList;
}
/**
* If the source content of the artifact represented by this node is a file,
* returns an action to view the file in the data source tree.
*
* @param srcContent The src content to navigate to in the timeline action.
*
* @return The src content navigation action or null.
*/
@NbBundle.Messages({
"# {0} - contentType",
"ArtifactNode_getTimelineSrcContentAction_actionDisplayName=View Source {0} in Timeline... "
})
private Action getTimelineSrcContentAction(Content srcContent) {
if (srcContent instanceof AbstractFile) {
return new ViewFileInTimelineAction((AbstractFile) srcContent,
Bundle.ArtifactNode_getTimelineSrcContentAction_actionDisplayName(
getContentTypeStr(srcContent)));
}
return null;
}
/**
* If the artifact represented by this node has a timestamp, an action to
* view it in the timeline.
*
* @param art The artifact for timeline navigation action.
* @param hasSupportedTimeStamp This artifact has a supported time stamp.
*
* @return The action or null if no action should exist.
*/
@NbBundle.Messages({
"ArtifactNode_getTimelineArtifactAction_displayName=View Selected Item in Timeline... "
})
private Action getTimelineArtifactAction(BlackboardArtifact art, boolean hasSupportedTimeStamp) {
if (hasSupportedTimeStamp) {
return new ViewArtifactInTimelineAction(art, Bundle.ArtifactNode_getTimelineArtifactAction_displayName());
} else {
return null;
}
}
}
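
The net effect of the refactor is that a node no longer assembles its own Action[]: it answers capability queries and hands itself to ActionsFactory. A hypothetical minimal implementer (this assumes ActionContext supplies no-op/empty defaults for the capability methods, which the selective overrides above suggest):

public final class ExampleContentNode extends AbstractNode implements ActionContext {

    ExampleContentNode(Content content) {
        super(Children.LEAF, Lookups.fixed(content));
    }

    @Override
    public boolean supportsSourceContentViewerActions() {
        return true; // the one capability this node opts into
    }

    @Override
    public Action[] getActions(boolean context) {
        // ActionsFactory inspects the capability methods and builds the menu,
        // inserting separators between groups as the old code did by hand.
        return ActionsFactory.getActions(this);
    }
}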

View File

@ -0,0 +1,165 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.util.List;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.Lookup;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.datamodel.AnalysisResultItem;
import org.sleuthkit.autopsy.datamodel.BlackboardArtifactItem;
import org.sleuthkit.autopsy.datamodel.DataArtifactItem;
import org.sleuthkit.autopsy.mainui.datamodel.BlackboardArtifactTagsRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ColumnKey;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
/**
* A node representing a BlackboardArtifactTag.
*/
public final class BlackboardArtifactTagNode extends AbstractNode {
private static final String ICON_PATH = "org/sleuthkit/autopsy/images/green-tag-icon-16.png"; //NON-NLS
private final BlackboardArtifactTagsRowDTO rowData;
private final List<ColumnKey> columns;
public BlackboardArtifactTagNode(SearchResultsDTO results, BlackboardArtifactTagsRowDTO rowData) {
super(Children.LEAF, createLookup(rowData.getTag()));
this.rowData = rowData;
this.columns = results.getColumns();
setDisplayName(rowData.getDisplayName());
setShortDescription(rowData.getDisplayName());
setName(Long.toString(rowData.getId()));
setIconBaseWithExtension(ICON_PATH);
}
@Override
protected Sheet createSheet() {
return ContentNodeUtil.setSheet(super.createSheet(), columns, rowData.getCellValues());
}
/**
* Creates the lookup for a BlackboardArtifactTag.
*
* Note: This method comes from the original datamodel.BlackboardArtifactTagNode.
*
* @param tag The tag to create a lookup for
*
* @return The lookup.
*/
private static Lookup createLookup(BlackboardArtifactTag tag) {
/*
* Make an Autopsy Data Model wrapper for the artifact.
*
* NOTE: The creation of an Autopsy Data Model independent of the
* NetBeans nodes is a work in progress. At the time this comment is
* being written, this object is only being used to indicate the item
* represented by this BlackboardArtifactTagNode.
*/
Content sourceContent = tag.getContent();
BlackboardArtifact artifact = tag.getArtifact();
BlackboardArtifactItem<?> artifactItem;
if (artifact instanceof AnalysisResult) {
artifactItem = new AnalysisResultItem((AnalysisResult) artifact, sourceContent);
} else {
artifactItem = new DataArtifactItem((DataArtifact) artifact, sourceContent);
}
return Lookups.fixed(tag, artifactItem, artifact, sourceContent);
}
// Actions are not a part of the first story; however, the original node is
// being deleted, which would make this information harder to find, so it is
// preserved here for reference.
// public Action[] getActions(boolean context) {
// List<Action> actions = new ArrayList<>();
// BlackboardArtifact artifact = getLookup().lookup(BlackboardArtifact.class);
// //if this artifact has a time stamp add the action to view it in the timeline
// try {
// if (ViewArtifactInTimelineAction.hasSupportedTimeStamp(artifact)) {
// actions.add(new ViewArtifactInTimelineAction(artifact));
// }
// } catch (TskCoreException ex) {
//            LOGGER.log(Level.SEVERE, MessageFormat.format("Error getting attribute(s) from blackboard artifact {0}.", artifact.getArtifactID()), ex); //NON-NLS
// }
//
// actions.add(new ViewTaggedArtifactAction(Bundle.BlackboardArtifactTagNode_viewSourceArtifact_text(), artifact));
// actions.add(null);
// // if the artifact links to another file, add an action to go to that file
// try {
// AbstractFile c = findLinked(artifact);
// if (c != null) {
// actions.add(ViewFileInTimelineAction.createViewFileAction(c));
// }
// } catch (TskCoreException ex) {
// LOGGER.log(Level.SEVERE, MessageFormat.format("Error getting linked file from blackboard artifact{0}.", artifact.getArtifactID()), ex); //NON-NLS
// }
// //if this artifact has associated content, add the action to view the content in the timeline
// AbstractFile file = getLookup().lookup(AbstractFile.class);
// if (null != file) {
// actions.add(ViewFileInTimelineAction.createViewSourceFileAction(file));
// }
// actions.addAll(DataModelActionsFactory.getActions(tag, true));
// actions.add(null);
// actions.addAll(Arrays.asList(super.getActions(context)));
// return actions.toArray(new Action[0]);
// }
//
// From DataModelActionsFactory
// public static List<Action> getActions(BlackboardArtifactTag artifactTag, boolean isArtifactSource) {
// List<Action> actionsList = new ArrayList<>();
// actionsList.add(new ViewContextAction((isArtifactSource ? VIEW_SOURCE_FILE_IN_DIR : VIEW_FILE_IN_DIR), artifactTag.getContent()));
// final BlackboardArtifactTagNode tagNode = new BlackboardArtifactTagNode(artifactTag);
// actionsList.add(null); // creates a menu separator
// actionsList.add(new NewWindowViewAction(VIEW_IN_NEW_WINDOW, tagNode));
// final Collection<AbstractFile> selectedFilesList
// = new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class));
// if (selectedFilesList.size() == 1) {
// actionsList.add(new ExternalViewerAction(OPEN_IN_EXTERNAL_VIEWER, tagNode));
// } else {
// actionsList.add(ExternalViewerShortcutAction.getInstance());
// }
// actionsList.add(null); // creates a menu separator
// actionsList.add(ExtractAction.getInstance());
// actionsList.add(ExportCSVAction.getInstance());
// actionsList.add(null); // creates a menu separator
// actionsList.add(AddContentTagAction.getInstance());
// if (isArtifactSource) {
// actionsList.add(AddBlackboardArtifactTagAction.getInstance());
// }
// if (selectedFilesList.size() == 1) {
// actionsList.add(DeleteFileContentTagAction.getInstance());
// }
// if (isArtifactSource) {
// final Collection<BlackboardArtifact> selectedArtifactsList
// = new HashSet<>(Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class));
// if (selectedArtifactsList.size() == 1) {
// actionsList.add(DeleteFileBlackboardArtifactTagAction.getInstance());
// }
// }
// actionsList.add(DeleteBlackboardArtifactTagAction.getInstance());
// actionsList.add(ReplaceBlackboardArtifactTagAction.getInstance());
// actionsList.addAll(ContextMenuExtensionPoint.getActions());
// return actionsList;
// }
}
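As a point of reference, the fixed Lookup built by createLookup() can be read
back from the node. A minimal sketch, assuming a BlackboardArtifactTagNode
instance named node (the variable name is illustrative, not part of this
commit):

BlackboardArtifactTag tag = node.getLookup().lookup(BlackboardArtifactTag.class);
BlackboardArtifact artifact = node.getLookup().lookup(BlackboardArtifact.class);
Content sourceContent = node.getLookup().lookup(Content.class);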

View File

@ -1,23 +1,5 @@
AnalysisResultTypeFactory_adHocName=Adhoc Results
# {0} - type
ArtifactNode_getAssociatedFileActions_viewAssociatedFileAction=View {0} in Directory
# {0} - type
ArtifactNode_getAssociatedFileActions_viewAssociatedFileInTimelineAction=View {0} in Timeline...
ArtifactNode_getAssociatedTypeStr_associated=Associated File
ArtifactNode_getAssociatedTypeStr_webCache=Cached File
ArtifactNode_getAssociatedTypeStr_webDownload=Downloaded File
# {0} - contentType
ArtifactNode_getSrcContentAction_actionDisplayName=View Source {0} in Directory
ArtifactNode_getSrcContentViewerActions_openInExtViewer=Open in External Viewer Ctrl+E
ArtifactNode_getSrcContentViewerActions_viewInNewWin=View Item in New Window
ArtifactNode_getTimelineArtifactAction_displayName=View Selected Item in Timeline...
# {0} - contentType
ArtifactNode_getTimelineSrcContentAction_actionDisplayName=View Source {0} in Timeline...
ArtifactNode_getViewSrcContentAction_type_DataArtifact=Data Artifact
ArtifactNode_getViewSrcContentAction_type_File=File
ArtifactNode_getViewSrcContentAction_type_OSAccount=OS Account
ArtifactNode_getViewSrcContentAction_type_unknown=Item
FileNodev2.getActions.openInExtViewer.text=Open in External Viewer Ctrl+E
FileNodev2.getActions.searchFilesSameMD5.text=Search for files with the same MD5 hash
FileNodev2.getActions.viewFileInDir.text=View File in Directory
FileNodev2.getActions.viewInNewWin.text=View Item in New Window
SearchResultRootNode_createSheet_childCount_displayName=Child Count
SearchResultRootNode_createSheet_childCount_name=Child Count
SearchResultRootNode_createSheet_type_displayName=Name
SearchResultRootNode_createSheet_type_name=Name
SearchResultRootNode_noDesc=No Description

View File

@ -0,0 +1,126 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.util.List;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.Lookup;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.mainui.datamodel.ColumnKey;
import org.sleuthkit.autopsy.mainui.datamodel.ContentTagsRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.datamodel.ContentTag;
/**
* A node representing a ContentTag.
*/
public final class ContentTagNode extends AbstractNode {
private static final String CONTENT_ICON_PATH = "org/sleuthkit/autopsy/images/blue-tag-icon-16.png"; //NON-NLS
private final ContentTagsRowDTO rowData;
private final List<ColumnKey> columns;
/**
* Construct a new node.
*
* @param results Search results.
* @param rowData Row data.
*/
public ContentTagNode(SearchResultsDTO results, ContentTagsRowDTO rowData) {
super(Children.LEAF, createLookup(rowData.getTag()));
this.rowData = rowData;
this.columns = results.getColumns();
setDisplayName(rowData.getDisplayName());
setName(rowData.getDisplayName());
setIconBaseWithExtension(CONTENT_ICON_PATH);
}
@Override
protected Sheet createSheet() {
return ContentNodeUtil.setSheet(super.createSheet(), columns, rowData.getCellValues());
}
/**
* Create the Lookup based on the tag type.
*
* @param tag The node tag.
*
* @return The lookup for the tag.
*/
private static Lookup createLookup(ContentTag tag) {
return Lookups.fixed(tag, tag.getContent());
}
// Not adding support for actions at this time, but the original node classes
// in datamodel are being deleted. This is the action code from the original
// ContentTagNode, preserved here for reference:
// public Action[] getActions(boolean context) {
// List<Action> actions = new ArrayList<>();
//
//
// AbstractFile file = getLookup().lookup(AbstractFile.class);
// if (file != null) {
// actions.add(ViewFileInTimelineAction.createViewFileAction(file));
// }
//
// actions.addAll(DataModelActionsFactory.getActions(tag, false));
// actions.add(null);
// actions.addAll(Arrays.asList(super.getActions(context)));
// return actions.toArray(new Action[actions.size()]);
// }
// From DataModelActionsFactory
// public static List<Action> getActions(ContentTag contentTag, boolean isArtifactSource) {
// List<Action> actionsList = new ArrayList<>();
// actionsList.add(new ViewContextAction((isArtifactSource ? VIEW_SOURCE_FILE_IN_DIR : VIEW_FILE_IN_DIR), contentTag.getContent()));
// final ContentTagNode tagNode = new ContentTagNode(contentTag);
// actionsList.add(null); // creates a menu separator
// actionsList.add(new NewWindowViewAction(VIEW_IN_NEW_WINDOW, tagNode));
// final Collection<AbstractFile> selectedFilesList
// = new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class));
// if (selectedFilesList.size() == 1) {
// actionsList.add(new ExternalViewerAction(OPEN_IN_EXTERNAL_VIEWER, tagNode));
// } else {
// actionsList.add(ExternalViewerShortcutAction.getInstance());
// }
// actionsList.add(null); // creates a menu separator
// actionsList.add(ExtractAction.getInstance());
// actionsList.add(ExportCSVAction.getInstance());
// actionsList.add(null); // creates a menu separator
// actionsList.add(AddContentTagAction.getInstance());
// if (isArtifactSource) {
// actionsList.add(AddBlackboardArtifactTagAction.getInstance());
// }
// if (selectedFilesList.size() == 1) {
// actionsList.add(DeleteFileContentTagAction.getInstance());
// }
// if (isArtifactSource) {
// final Collection<BlackboardArtifact> selectedArtifactsList
// = new HashSet<>(Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class));
// if (selectedArtifactsList.size() == 1) {
// actionsList.add(DeleteFileBlackboardArtifactTagAction.getInstance());
// }
// }
// actionsList.add(DeleteContentTagAction.getInstance());
// actionsList.add(ReplaceContentTagAction.getInstance());
// actionsList.addAll(ContextMenuExtensionPoint.getActions());
// return actionsList;
// }
}

View File

@ -18,32 +18,20 @@
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import javax.swing.Action;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
import org.sleuthkit.autopsy.actions.AddContentTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint;
import org.sleuthkit.autopsy.datamodel.FileTypeExtensions;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO;
import org.sleuthkit.autopsy.directorytree.ExportCSVAction;
import org.sleuthkit.autopsy.directorytree.ExternalViewerAction;
import org.sleuthkit.autopsy.directorytree.ExternalViewerShortcutAction;
import org.sleuthkit.autopsy.directorytree.ExtractAction;
import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
import org.sleuthkit.autopsy.mainui.datamodel.ColumnKey;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType;
import org.sleuthkit.autopsy.modules.embeddedfileextractor.ExtractArchiveWithPasswordAction;
import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.TskCoreException;
@ -52,7 +40,7 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
/**
* A node for representing an AbstractFile.
*/
public class FileNode extends AbstractNode {
public class FileNode extends AbstractNode implements ActionContext {
/**
* Gets the path to the icon file that should be used to visually represent
@ -91,17 +79,15 @@ public class FileNode extends AbstractNode {
return "org/sleuthkit/autopsy/images/file-icon.png";
}
}
private final boolean directoryBrowseMode;
private final FileRowDTO fileData;
private final List<ColumnKey> columns;
public FileNode(SearchResultsDTO results, FileRowDTO file) {
this(results, file, true);
}
public FileNode(SearchResultsDTO results, FileRowDTO file, boolean directoryBrowseMode) {
// GVDTODO: at some point, this leaf will need to allow for children
super(Children.LEAF, ContentNodeUtil.getLookup(file.getAbstractFile()));
@ -128,55 +114,43 @@ public class FileNode extends AbstractNode {
}
}
/**
* Gets the set of actions that are associated with this node. This set is
* used to construct the context menu for the node.
*
* @param context Whether to find actions for context meaning or for the
* node itself.
*
* @return An array of the actions.
*/
@Override
@NbBundle.Messages({
"FileNodev2.getActions.viewFileInDir.text=View File in Directory",
"FileNodev2.getActions.viewInNewWin.text=View Item in New Window",
"FileNodev2.getActions.openInExtViewer.text=Open in External Viewer Ctrl+E",
"FileNodev2.getActions.searchFilesSameMD5.text=Search for files with the same MD5 hash"})
public Action[] getActions(boolean context) {
List<Action> actionsList = new ArrayList<>();
public boolean supportsViewInTimeline() {
return true;
}
// GVDTODO: action requires node
// if (!this.directoryBrowseMode) {
// actionsList.add(new ViewContextAction(Bundle.FileNodev2_getActions_viewFileInDir_text(), this));
// }
@Override
public Optional<AbstractFile> getFileForViewInTimelineAction() {
return Optional.of(fileData.getAbstractFile());
}
@Override
public boolean supportsSourceContentViewerActions() {
return true;
}
actionsList.add(ViewFileInTimelineAction.createViewFileAction(this.fileData.getAbstractFile()));
actionsList.add(null); // Creates an item separator
@Override
public Optional<Node> getNewWindowActionNode() {
return Optional.of(this);
}
actionsList.add(new NewWindowViewAction(Bundle.FileNodev2_getActions_viewInNewWin_text(), this));
final Collection<AbstractFile> selectedFilesList
= new HashSet<>(Utilities.actionsGlobalContext().lookupAll(AbstractFile.class));
if (selectedFilesList.size() == 1) {
actionsList.add(new ExternalViewerAction(
Bundle.FileNodev2_getActions_openInExtViewer_text(), this));
} else {
actionsList.add(ExternalViewerShortcutAction.getInstance());
}
@Override
public Optional<Node> getExternalViewerActionNode() {
return Optional.of(this);
}
actionsList.add(null); // Creates an item separator
@Override
public boolean supportsExtractActions() {
return true;
}
actionsList.add(ExtractAction.getInstance());
actionsList.add(ExportCSVAction.getInstance());
actionsList.add(null); // Creates an item separator
@Override
public boolean supportsContentTagAction() {
return true;
}
actionsList.add(AddContentTagAction.getInstance());
if (1 == selectedFilesList.size()) {
actionsList.add(DeleteFileContentTagAction.getInstance());
}
actionsList.addAll(ContextMenuExtensionPoint.getActions());
@Override
public Optional<AbstractFile> getExtractArchiveWithPasswordActionFile() {
// GVDTODO: HANDLE THIS ACTION IN A BETTER WAY!-----
// See JIRA-8099
AbstractFile file = this.fileData.getAbstractFile();
@ -187,15 +161,13 @@ public class FileNode extends AbstractNode {
} catch (TskCoreException ex) {
// TODO
}
if (encryptionDetected) {
actionsList.add(new ExtractArchiveWithPasswordAction(this.fileData.getAbstractFile()));
}
//------------------------------------------------
actionsList.add(null);
actionsList.addAll(Arrays.asList(super.getActions(true)));
return encryptionDetected ? Optional.of(fileData.getAbstractFile()) : Optional.empty();
}
return actionsList.toArray(new Action[actionsList.size()]);
@Override
public Action[] getActions(boolean context) {
return ActionsFactory.getActions(this);
}
@Override

View File

@ -29,6 +29,8 @@ import org.openide.nodes.Node;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultTableSearchResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.BlackboardArtifactTagsRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ContentTagsRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactTableSearchResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO;
@ -72,7 +74,11 @@ public class SearchResultChildFactory extends ChildFactory<ChildKey> {
return new FileNode(key.getSearchResults(), (FileRowDTO) key.getRow());
} else if(AnalysisResultRowDTO.getTypeIdForClass().equals(typeId)) {
return new AnalysisResultNode((AnalysisResultTableSearchResultsDTO)key.getSearchResults(), (AnalysisResultRowDTO) key.getRow());
}else {
} else if(ContentTagsRowDTO.getTypeIdForClass().equals(typeId)) {
return new ContentTagNode(key.getSearchResults(), (ContentTagsRowDTO)key.getRow());
} else if(BlackboardArtifactTagsRowDTO.getTypeIdForClass().equals(typeId)) {
return new BlackboardArtifactTagNode(key.getSearchResults(), (BlackboardArtifactTagsRowDTO)key.getRow());
} else {
logger.log(Level.WARNING, MessageFormat.format("No known node for type id: {0} provided by row result: {1}", typeId, key.getRow()));
}
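// Editorial sketch (assumed names, not part of this commit): a hypothetical
// new row type would be wired in with the same dispatch pattern as above:
// } else if (ExampleRowDTO.getTypeIdForClass().equals(typeId)) {
//     return new ExampleNode(key.getSearchResults(), (ExampleRowDTO) key.getRow());
// }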
} catch (ClassCastException ex) {

View File

@ -22,6 +22,7 @@ import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
@ -45,6 +46,13 @@ public class SearchResultRootNode extends AbstractNode {
setDisplayName(initialResults.getDisplayName());
}
@Messages({
"SearchResultRootNode_noDesc=No Description",
"SearchResultRootNode_createSheet_type_name=Name",
"SearchResultRootNode_createSheet_type_displayName=Name",
"SearchResultRootNode_createSheet_childCount_name=Child Count",
"SearchResultRootNode_createSheet_childCount_displayName=Child Count"
})
@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
@ -54,14 +62,16 @@ public class SearchResultRootNode extends AbstractNode {
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.name"),
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.displayName"),
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.desc"),
sheetSet.put(new NodeProperty<>(
Bundle.SearchResultRootNode_createSheet_type_name(),
Bundle.SearchResultRootNode_createSheet_type_displayName(),
Bundle.SearchResultRootNode_noDesc(),
getDisplayName()));
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.name"),
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.displayName"),
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.desc"),
sheetSet.put(new NodeProperty<>(
Bundle.SearchResultRootNode_createSheet_childCount_name(),
Bundle.SearchResultRootNode_createSheet_childCount_displayName(),
Bundle.SearchResultRootNode_noDesc(),
this.factory.getResultCount()));
return sheet;

View File

@ -0,0 +1,195 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.nodes.actions;
import java.util.Optional;
import org.openide.nodes.Node;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory.ActionGroup;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
/**
* Interface for nodes that want to use the ActionsFactory to build their
* popup menu.
*/
public interface ActionContext {
/**
* Return the source content.
*
* @return The source content object.
*/
default Optional<Content> getSourceContent() {
return Optional.empty();
}
/**
* Returns an ActionGroup containing the Actions that are specific to the
* node.
*
* @return ActionGroup of actions.
*/
default Optional<ActionGroup> getNodeSpecificActions() {
return Optional.empty();
}
/**
* Returns the linked/associated file for the context. This method must
* return a file if supportsAssociatedFileActions returns true.
*
* @return An AbstractFile.
*/
default Optional<AbstractFile> getLinkedFile() {
return Optional.empty();
}
/**
* Returns an instance of a BlackboardArtifact.
*
* @return An artifact, or empty if the ActionContext does not have an
* artifact.
*/
default Optional<BlackboardArtifact> getArtifact() {
return Optional.empty();
}
/**
* Returns true if this context supports showing an artifact or a file in
* the Timeline viewer.
*
* @return True if context supports this action.
*/
default boolean supportsViewInTimeline() {
return false;
}
/**
* Returns the artifact that should appear for the node in the Timeline
* viewer.
*
* @return The artifact to show in the timeline window.
*/
default Optional<BlackboardArtifact> getArtifactForTimeline() {
return Optional.empty();
}
/**
* Returns the file that should appear for the node in the Timeline viewer.
*
* @return The file to show in the timeline window.
*/
default Optional<AbstractFile> getFileForViewInTimelineAction() {
return Optional.empty();
}
/**
* True if the context supports an action to navigate to the source content
* in the tree hierarchy.
*
* @return True if this action is supported.
*/
default boolean supportsSourceContentActions() {
return false;
}
/**
* Returns the source AbstractFile to be viewed in the Timeline window.
*
* @return The source file.
*/
default Optional<AbstractFile> getSourceFileForTimelineAction() {
return Optional.empty();
}
/**
* Returns true if the context supports the associated/link file actions.
*
* @return True if this action is supported.
*/
default boolean supportsAssociatedFileActions() {
return false;
}
/**
* True if the ActionContext supports showing a node in a new content
* panel.
*
* @return True if this action is supported.
*/
default boolean supportsSourceContentViewerActions() {
return false;
}
/**
* Returns the node to be displayed in a new content panel as launched by
* NewWindowAction.
*
* @return The node to display.
*/
default Optional<Node> getNewWindowActionNode() {
return Optional.empty();
}
/**
* Returns the node to be displayed in an external viewer.
*
* @return The node to be displayed.
*/
default Optional<Node> getExternalViewerActionNode() {
return Optional.empty();
}
/**
* Returns true if the context supports the extract actions.
*
* @return True if the action is supported.
*/
default boolean supportsExtractActions() {
return false;
}
/**
* Returns true if the context supports the content tag actions.
*
* @return True if the action is supported.
*/
default boolean supportsContentTagAction() {
return false;
}
/**
* Returns true if the context supports the artifact tag actions.
*
* @return True if the action is supported.
*/
default boolean supportsArtifactTagAction() {
return false;
}
/**
* Returns the file to be extracted with a password, if any.
*
* @return The file to be extracted, or empty if the action is not
* supported.
*/
default Optional<AbstractFile> getExtractArchiveWithPasswordActionFile() {
return Optional.empty();
}
}
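For orientation, a minimal sketch of a node that opts in to a few actions
through this interface and delegates menu construction to ActionsFactory. The
class ExampleNode and its file field are assumptions for illustration, not
part of this commit:

import java.util.Optional;
import javax.swing.Action;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory;
import org.sleuthkit.datamodel.AbstractFile;

public final class ExampleNode extends AbstractNode implements ActionContext {

    private final AbstractFile file;

    ExampleNode(AbstractFile file) {
        super(Children.LEAF);
        this.file = file;
    }

    @Override
    public boolean supportsViewInTimeline() {
        return true;
    }

    @Override
    public Optional<AbstractFile> getFileForViewInTimelineAction() {
        return Optional.of(file);
    }

    @Override
    public boolean supportsExtractActions() {
        return true;
    }

    @Override
    public Action[] getActions(boolean context) {
        // Methods not overridden here fall back to the interface defaults.
        return ActionsFactory.getActions(this);
    }
}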

View File

@ -0,0 +1,459 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.nodes.actions;
import java.util.AbstractCollection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Stream;
import javax.swing.Action;
import org.openide.actions.PropertiesAction;
import org.openide.nodes.Node;
import org.openide.util.NbBundle.Messages;
import org.openide.util.Utilities;
import org.openide.util.actions.SystemAction;
import org.sleuthkit.autopsy.actions.AddBlackboardArtifactTagAction;
import org.sleuthkit.autopsy.actions.AddContentTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileBlackboardArtifactTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
import org.sleuthkit.autopsy.actions.ViewArtifactAction;
import org.sleuthkit.autopsy.actions.ViewOsAccountAction;
import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint;
import org.sleuthkit.autopsy.datamodel.BlackboardArtifactItem;
import org.sleuthkit.autopsy.datamodel.DataModelActionsFactory;
import org.sleuthkit.autopsy.directorytree.ExportCSVAction;
import org.sleuthkit.autopsy.directorytree.ExternalViewerAction;
import org.sleuthkit.autopsy.directorytree.ExternalViewerShortcutAction;
import org.sleuthkit.autopsy.directorytree.ExtractAction;
import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
import org.sleuthkit.autopsy.directorytree.ViewContextAction;
import org.sleuthkit.autopsy.modules.embeddedfileextractor.ExtractArchiveWithPasswordAction;
import org.sleuthkit.autopsy.timeline.actions.ViewArtifactInTimelineAction;
import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.Report;
import org.sleuthkit.datamodel.TskCoreException;
/**
* An action factory for node classes that will have a popup menu.
*
* Nodes do not need to implement the full ActionContext interface; its
* methods have default implementations, so a node can override only the
* ActionContext methods for its supported actions.
*/
public final class ActionsFactory {
// private constructor for utility class.
private ActionsFactory() {}
/**
* Creates the list of actions for the given ActionContext.
*
* @param actionContext The context for the actions.
*
* @return The list of Actions to display.
*/
public static Action[] getActions(ActionContext actionContext) {
List<ActionGroup> actionGroups = new ArrayList<>();
Optional<ActionGroup> nodeSpecificGroup = actionContext.getNodeSpecificActions();
if (nodeSpecificGroup.isPresent()) {
actionGroups.add(nodeSpecificGroup.get());
}
if (actionContext.supportsViewInTimeline()) {
actionGroups.add(new ActionGroup(getViewInTimelineAction(actionContext)));
}
ActionGroup group = new ActionGroup();
if (actionContext.supportsAssociatedFileActions()) {
group.addAll(getAssociatedFileActions(actionContext).get());
}
if (actionContext.getSourceContent().isPresent()) {
Optional<ActionGroup> optionalGroup = getSourceContentActions(actionContext);
if (optionalGroup.isPresent()) {
group.addAll(optionalGroup.get());
}
}
actionGroups.add(group);
Optional<Content> optionalSourceContext = actionContext.getSourceContent();
if (optionalSourceContext.isPresent() && optionalSourceContext.get() instanceof Report) {
actionGroups.add(new ActionGroup(DataModelActionsFactory.getActions(optionalSourceContext.get(), false)));
}
if (actionContext.supportsSourceContentViewerActions()) {
Optional<ActionGroup> optionalGroup = getSourceContentViewerActions(actionContext);
if (optionalGroup.isPresent()) {
actionGroups.add(optionalGroup.get());
}
}
if (actionContext.supportsExtractActions()) {
actionGroups.add(getExtractActions());
}
actionGroups.add(getTagActions(actionContext));
actionGroups.add(new ActionGroup(ContextMenuExtensionPoint.getActions()));
Optional<AbstractFile> optionalFile = actionContext.getExtractArchiveWithPasswordActionFile();
if (optionalFile.isPresent()) {
actionGroups.add(new ActionGroup(new ExtractArchiveWithPasswordAction(optionalFile.get())));
}
List<Action> actionList = new ArrayList<>();
for (ActionGroup aGroup : actionGroups) {
if (aGroup != null) {
actionList.addAll(aGroup);
actionList.add(null);
}
}
// Add the properties menu item to the bottom.
actionList.add(SystemAction.get(PropertiesAction.class));
Action[] actions = new Action[actionList.size()];
actionList.toArray(actions);
return actions;
}
/**
* Returns the Extract actions. These actions are not specific to the
* ActionContext.
*
* @return The Extract ActionGroup.
*/
static ActionGroup getExtractActions() {
ActionGroup actionsGroup = new ActionGroup();
actionsGroup.add(ExtractAction.getInstance());
actionsGroup.add(ExportCSVAction.getInstance());
return actionsGroup;
}
/**
* Returns the ActionGroup for the source content viewer actions.
*
* @param actionContext The context for the actions.
*
* @return The action group with the actions, or empty if these actions are
* not supported by the ActionContext.
*/
@Messages({
"ActionsFactory_getSrcContentViewerActions_viewInNewWin=View Item in New Window",
"ActionsFactory_getSrcContentViewerActions_openInExtViewer=Open in External Viewer Ctrl+E"
})
private static Optional<ActionGroup> getSourceContentViewerActions(ActionContext actionContext) {
ActionGroup actionGroup = new ActionGroup();
Optional<Node> nodeOptional = actionContext.getNewWindowActionNode();
if (nodeOptional.isPresent()) {
actionGroup.add(new NewWindowViewAction(Bundle.ActionsFactory_getSrcContentViewerActions_viewInNewWin(), nodeOptional.get()));
}
nodeOptional = actionContext.getExternalViewerActionNode();
if (nodeOptional.isPresent()) {
int selectedFileCount = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class).size();
if (selectedFileCount == 1) {
actionGroup.add(new ExternalViewerAction(Bundle.ActionsFactory_getSrcContentViewerActions_openInExtViewer(), nodeOptional.get()));
} else {
actionGroup.add(ExternalViewerShortcutAction.getInstance());
}
}
return actionGroup.isEmpty() ? Optional.empty() : Optional.of(actionGroup);
}
/**
* Creates the ActionGroup for the source content actions.
*
* @param actionContext The context for these actions.
*
* @return An ActionGroup if one of the actions is supported.
*/
@Messages({
"# {0} - contentType",
"ActionsFactory_getTimelineSrcContentAction_actionDisplayName=View Source {0} in Timeline... "
})
private static Optional<ActionGroup> getSourceContentActions(ActionContext actionContext) {
ActionGroup group = new ActionGroup();
Optional<Action> optionalAction = getViewSrcContentAction(actionContext);
if (optionalAction.isPresent()) {
group.add(optionalAction.get());
}
Optional<AbstractFile> srcContentOptional = actionContext.getSourceFileForTimelineAction();
if (srcContentOptional.isPresent()) {
group.add(new ViewFileInTimelineAction(srcContentOptional.get(),
Bundle.ActionsFactory_getTimelineSrcContentAction_actionDisplayName(
getContentTypeStr(srcContentOptional.get()))));
}
return group.isEmpty() ? Optional.empty() : Optional.of(group);
}
/**
* Creates the ActionGroup for the associated file actions.
*
* @param context The context for the actions.
*
* @return An ActionGroup if the context has both a linked file and an
* artifact; empty otherwise.
*/
@Messages({
"# {0} - type",
"ActionsFactory_getAssociatedFileActions_viewAssociatedFileAction=View {0} in Directory",
"# {0} - type",
"ActionsFactory_getAssociatedFileActions_viewAssociatedFileInTimelineAction=View {0} in Timeline..."
})
private static Optional<ActionGroup> getAssociatedFileActions(ActionContext context) {
Optional<AbstractFile> associatedFileOptional = context.getLinkedFile();
Optional<BlackboardArtifact> artifactOptional = context.getArtifact();
if (!associatedFileOptional.isPresent() || !artifactOptional.isPresent()) {
return Optional.empty();
}
BlackboardArtifact.Type artifactType;
try {
artifactType = artifactOptional.get().getType();
} catch (TskCoreException ex) {
return Optional.empty();
}
ActionGroup group = new ActionGroup(Arrays.asList(
new ViewContextAction(
Bundle.ActionsFactory_getAssociatedFileActions_viewAssociatedFileAction(
getAssociatedTypeStr(artifactType)),
associatedFileOptional.get()),
new ViewFileInTimelineAction(associatedFileOptional.get(),
Bundle.ActionsFactory_getAssociatedFileActions_viewAssociatedFileInTimelineAction(
getAssociatedTypeStr(artifactType)))
));
return Optional.of(group);
}
/**
* Returns the tag actions for the given context.
*
* @param context The action context.
*
* @return Tag ActionGroup.
*/
private static ActionGroup getTagActions(ActionContext context) {
int selectedFileCount = Utilities.actionsGlobalContext().lookupAll(AbstractFile.class).size();
int selectedArtifactCount = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifactItem.class).size();
ActionGroup actionGroup = new ActionGroup();
if (context.supportsContentTagAction()) {
actionGroup.add(AddContentTagAction.getInstance());
}
if (context.supportsArtifactTagAction()) {
actionGroup.add(AddBlackboardArtifactTagAction.getInstance());
}
if (context.supportsContentTagAction() && (selectedFileCount == 1)) {
actionGroup.add(DeleteFileContentTagAction.getInstance());
}
if (context.supportsArtifactTagAction() && selectedArtifactCount == 1) {
actionGroup.add(DeleteFileBlackboardArtifactTagAction.getInstance());
}
return actionGroup;
}
/**
* Creates an action to navigate to the source content in the tree
* hierarchy.
*
* @param context The context for the action.
*
* @return The action for the given context, or empty if there is no source
* content.
*/
@Messages({
"# {0} - contentType",
"ArtifactFactory_getViewSrcContentAction_displayName=View Source {0} in Directory"
})
private static Optional<Action> getViewSrcContentAction(ActionContext context) {
Optional<Content> sourceContent = context.getSourceContent();
Optional<BlackboardArtifact> artifact = context.getArtifact();
if (sourceContent.isPresent()) {
if (sourceContent.get() instanceof DataArtifact) {
return Optional.of(new ViewArtifactAction(
(BlackboardArtifact) sourceContent.get(),
Bundle.ArtifactFactory_getViewSrcContentAction_displayName(
getContentTypeStr(sourceContent.get()))));
} else if (sourceContent.get() instanceof OsAccount) {
return Optional.of(new ViewOsAccountAction(
(OsAccount) sourceContent.get(),
Bundle.ArtifactFactory_getViewSrcContentAction_displayName(
getContentTypeStr(sourceContent.get()))));
} else if (sourceContent.get() instanceof AbstractFile || (artifact.isPresent() && artifact.get() instanceof DataArtifact)) {
return Optional.of(new ViewContextAction(
Bundle.ArtifactFactory_getViewSrcContentAction_displayName(
getContentTypeStr(sourceContent.get())),
sourceContent.get()));
}
}
return Optional.empty();
}
/**
* Returns the name to represent the type of the content (file, data
* artifact, os account, item).
*
* @param content The content.
*
* @return The name of the type of content.
*/
@Messages({
"ActionFactory_getViewSrcContentAction_type_File=File",
"ActionFactory_getViewSrcContentAction_type_DataArtifact=Data Artifact",
"ActionFactory_getViewSrcContentAction_type_OSAccount=OS Account",
"ActionFactory_getViewSrcContentAction_type_unknown=Item"
})
private static String getContentTypeStr(Content content) {
if (content instanceof AbstractFile) {
return Bundle.ActionFactory_getViewSrcContentAction_type_File();
} else if (content instanceof DataArtifact) {
return Bundle.ActionFactory_getViewSrcContentAction_type_DataArtifact();
} else if (content instanceof OsAccount) {
return Bundle.ActionFactory_getViewSrcContentAction_type_OSAccount();
} else {
return Bundle.ActionFactory_getViewSrcContentAction_type_unknown();
}
}
/**
* If the artifact represented by this node has a timestamp, returns an
* action to view it in the timeline.
*
* @param context The action context.
*
* @return The action or null if no action should exist.
*/
@Messages({
"ActionsFactory_getTimelineArtifactAction_displayName=View Selected Item in Timeline... "
})
private static Action getViewInTimelineAction(ActionContext context) {
Optional<BlackboardArtifact> optionalArtifact = context.getArtifact();
Optional<AbstractFile> optionalFile = context.getFileForViewInTimelineAction();
if (optionalArtifact.isPresent()) {
return new ViewArtifactInTimelineAction(optionalArtifact.get(), Bundle.ActionsFactory_getTimelineArtifactAction_displayName());
} else if (optionalFile.isPresent()) {
return ViewFileInTimelineAction.createViewFileAction(optionalFile.get());
}
return null;
}
/**
* Returns the display name for the associated file, based on the artifact
* type, for use in a right-click menu.
*
* @param artifactType The artifact type.
*
* @return The artifact type name.
*/
@Messages({
"ActionsFactory_getAssociatedTypeStr_webCache=Cached File",
"ActionsFactory_getAssociatedTypeStr_webDownload=Downloaded File",
"ActionsFactory_getAssociatedTypeStr_associated=Associated File",})
private static String getAssociatedTypeStr(BlackboardArtifact.Type artifactType) {
if (BlackboardArtifact.Type.TSK_WEB_CACHE.equals(artifactType)) {
return Bundle.ActionsFactory_getAssociatedTypeStr_webCache();
} else if (BlackboardArtifact.Type.TSK_WEB_DOWNLOAD.equals(artifactType)) {
return Bundle.ActionsFactory_getAssociatedTypeStr_webDownload();
} else {
return Bundle.ActionsFactory_getAssociatedTypeStr_associated();
}
}
/**
* Represents a group of related actions.
*/
public static class ActionGroup extends AbstractCollection<Action> {
private final List<Action> actionList;
/**
* Construct a new ActionGroup instance with an empty list.
*/
ActionGroup() {
this.actionList = new ArrayList<>();
}
/**
* Construct a new ActionGroup instance with the given list of actions.
*
* @param actionList List of actions to add to the group.
*/
ActionGroup(List<Action> actionList) {
this();
this.actionList.addAll(actionList);
}
ActionGroup(Action action) {
this();
actionList.add(action);
}
@Override
public boolean isEmpty() {
return actionList.isEmpty();
}
@Override
public boolean add(Action action) {
return actionList.add(action);
}
@Override
public Iterator<Action> iterator() {
return actionList.iterator();
}
@Override
public void forEach(Consumer<? super Action> action) {
actionList.forEach(action);
}
@Override
public int size() {
return actionList.size();
}
@Override
public Stream<Action> stream() {
return actionList.stream();
}
}
}
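A brief usage sketch for ActionGroup, assuming a node in a package where the
ActionGroup constructors are accessible and a hypothetical ExampleAction; both
assumptions are for illustration only. Note that getActions(...) above appends
a null after each group, which Swing renders as a menu separator between
groups:

@Override
public Optional<ActionGroup> getNodeSpecificActions() {
    // These actions appear as the first group of the popup menu,
    // followed by a separator.
    ActionGroup group = new ActionGroup(new ExampleAction());
    group.add(ExportCSVAction.getInstance());
    return Optional.of(group);
}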

View File

@ -0,0 +1,18 @@
ActionFactory_getViewSrcContentAction_type_DataArtifact=Data Artifact
ActionFactory_getViewSrcContentAction_type_File=File
ActionFactory_getViewSrcContentAction_type_OSAccount=OS Account
ActionFactory_getViewSrcContentAction_type_unknown=Item
# {0} - type
ActionsFactory_getAssociatedFileActions_viewAssociatedFileAction=View {0} in Directory
# {0} - type
ActionsFactory_getAssociatedFileActions_viewAssociatedFileInTimelineAction=View {0} in Timeline...
ActionsFactory_getAssociatedTypeStr_associated=Associated File
ActionsFactory_getAssociatedTypeStr_webCache=Cached File
ActionsFactory_getAssociatedTypeStr_webDownload=Downloaded File
ActionsFactory_getSrcContentViewerActions_openInExtViewer=Open in External Viewer Ctrl+E
ActionsFactory_getSrcContentViewerActions_viewInNewWin=View Item in New Window
ActionsFactory_getTimelineArtifactAction_displayName=View Selected Item in Timeline...
# {0} - contentType
ActionsFactory_getTimelineSrcContentAction_actionDisplayName=View Source {0} in Timeline...
# {0} - contentType
ArtifactFactory_getViewSrcContentAction_displayName=View Source {0} in Directory

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2019 Basis Technology Corp.
* Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -303,7 +303,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
.getAnalysisResult();
Case.getCurrentCase().getServices().getArtifactsBlackboard()
.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName(), context.getJobId());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
} catch (Blackboard.BlackboardException ex) {
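This three-argument postArtifact call recurs throughout the commit: the ingest
job id is now threaded into each post so the artifact is associated with the
ingest job that produced it. A minimal sketch of the pattern, with assumed
local names (blackboard, artifact, context):

// Before (two-argument form, replaced throughout this commit):
// blackboard.postArtifact(artifact, MODULE_NAME);
// After: also pass the ingest job id from the IngestJobContext.
blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());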

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -114,7 +114,7 @@ final class DATExtractor extends DroneExtractor {
GeoTrackPoints trackPoints = processCSVFile(context, DATFile, csvFilePath);
if (trackPoints != null && !trackPoints.isEmpty()) {
(new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile)).addTrack(DATFile.getName(), trackPoints, null);
(new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile, context.getJobId())).addTrack(DATFile.getName(), trackPoints, null);
} else {
logger.log(Level.INFO, String.format("No trackpoints with valid longitude or latitude found in %s", DATFile.getName())); //NON-NLS
}

View File

@ -341,7 +341,7 @@ class SevenZipExtractor {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS
@ -870,7 +870,7 @@ class SevenZipExtractor {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -172,7 +172,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2017-2018 Basis Technology Corp.
* Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -212,7 +212,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -155,7 +155,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName());
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName(), jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message());

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2018 Basis Technology Corp.
* Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -206,7 +206,7 @@ public class FileTypeIdIngestModule implements FileIngestModule {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName());
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName(), jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.modules.hashdatabase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
@ -43,13 +42,11 @@ import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.Score.Significance;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -552,7 +549,7 @@ public class HashDbIngestModule implements FileIngestModule {
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(badFile, moduleName);
blackboard.postArtifact(badFile, moduleName, jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014-2018 Basis Technology Corp.
* Copyright 2014-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -151,7 +151,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
try {
// Post thet artifact to the blackboard.
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -101,7 +101,7 @@ public class ALeappAnalyzerIngestModule implements DataSourceIngestModule {
}
try {
aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName());
aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName(), context);
} catch (IOException | IngestModuleException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.ALeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex);
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -101,7 +101,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
}
try {
iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName());
iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName(), context);
} catch (IOException | IngestModuleException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex);
}
@ -333,7 +333,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
* added to reports
*/
private void addILeappReportToReports(Path iLeappOutputDir, Case currentCase) {
List<String> allIndexFiles = new ArrayList<>();
List<String> allIndexFiles;
try (Stream<Path> walk = Files.walk(iLeappOutputDir)) {
@ -402,7 +402,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
String fileName = FilenameUtils.getName(ffp);
String filePath = FilenameUtils.getPath(ffp);
List<AbstractFile> iLeappFiles = new ArrayList<>();
List<AbstractFile> iLeappFiles;
try {
if (filePath.isEmpty()) {
iLeappFiles = fileManager.findFiles(dataSource, fileName); //NON-NLS

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -61,6 +61,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.datamodel.AbstractFile;
@ -108,10 +109,10 @@ public final class LeappFileProcessor {
* Main constructor.
*
* @param attributeType The BlackboardAttribute type or null if not
* used.
* @param columnName The name of the column in the tsv file.
* @param required Whether or not this attribute is required to be
* present.
* used.
* @param columnName The name of the column in the tsv file.
* @param required Whether or not this attribute is required to be
* present.
*/
TsvColumn(BlackboardAttribute.Type attributeType, String columnName, boolean required) {
this.attributeType = attributeType;
@ -144,6 +145,7 @@ public final class LeappFileProcessor {
private static final Logger logger = Logger.getLogger(LeappFileProcessor.class.getName());
private final String xmlFile; //NON-NLS
private final String moduleName;
private final IngestJobContext context;
private final Map<String, String> tsvFiles;
private final Map<String, BlackboardArtifact.Type> tsvFileArtifacts;
@ -192,15 +194,16 @@ public final class LeappFileProcessor {
.put("call history.tsv", "calllog")
.build();
Blackboard blkBoard;
private final Blackboard blkBoard;
public LeappFileProcessor(String xmlFile, String moduleName) throws IOException, IngestModuleException, NoCurrentCaseException {
public LeappFileProcessor(String xmlFile, String moduleName, IngestJobContext context) throws IOException, IngestModuleException, NoCurrentCaseException {
this.tsvFiles = new HashMap<>();
this.tsvFileArtifacts = new HashMap<>();
this.tsvFileArtifactComments = new HashMap<>();
this.tsvFileAttributes = new HashMap<>();
this.xmlFile = xmlFile;
this.moduleName = moduleName;
this.context = context;
blkBoard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
@ -218,7 +221,8 @@ public final class LeappFileProcessor {
"LeappFileProcessor.has.run=Leapp",
"LeappFileProcessor.Leapp.cancelled=Leapp run was canceled",
"LeappFileProcessor.completed=Leapp Processing Completed",
"LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory"})
"LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory"
})
public ProcessResult processFiles(Content dataSource, Path moduleOutputPath, AbstractFile LeappFile) {
try {
List<String> LeappTsvOutputFiles = findTsvFiles(moduleOutputPath);
@ -249,7 +253,7 @@ public final class LeappFileProcessor {
* we know we want to process and return the list to process those files.
*/
private List<String> findTsvFiles(Path LeappOutputDir) throws IngestModuleException {
List<String> allTsvFiles = new ArrayList<>();
List<String> allTsvFiles;
List<String> foundTsvFiles = new ArrayList<>();
try (Stream<Path> walk = Files.walk(LeappOutputDir)) {
@ -275,7 +279,7 @@ public final class LeappFileProcessor {
* Process the Leapp files that were found that match the xml mapping file
*
* @param LeappFilesToProcess List of files to process
* @param LeappImageFile Abstract file to create artifact for
* @param LeappImageFile Abstract file to create artifact for
*
* @throws FileNotFoundException
* @throws IOException
@ -308,7 +312,7 @@ public final class LeappFileProcessor {
* Process the Leapp files that were found that match the xml mapping file
*
* @param LeappFilesToProcess List of files to process
* @param dataSource The data source.
* @param dataSource The data source.
*
* @throws FileNotFoundException
* @throws IOException
@ -318,7 +322,7 @@ public final class LeappFileProcessor {
for (String LeappFileName : LeappFilesToProcess) {
String fileName = FilenameUtils.getName(LeappFileName);
File LeappFile = new File(LeappFileName);
File LeappFile = new File(LeappFileName);
if (tsvFileAttributes.containsKey(fileName)) {
List<TsvColumn> attrList = tsvFileAttributes.get(fileName);
BlackboardArtifact.Type artifactType = tsvFileArtifacts.get(fileName);
@ -345,12 +349,12 @@ public final class LeappFileProcessor {
String trackpointSegmentName = null;
GeoTrackPoints pointList = new GeoTrackPoints();
AbstractFile geoAbstractFile = null;
if (LeappFile == null || !LeappFile.exists() || fileName == null) {
logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile == null ? LeappFile.toString() : "<null>"));
logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile != null ? LeappFile.toString() : "<null>"));
return;
} else if (attrList == null || artifactType == null || dataSource == null) {
logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile == null ? LeappFile.toString() : "<null>"));
logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile.toString()));
return;
}
@ -405,11 +409,10 @@ public final class LeappFileProcessor {
}
}
}
try {
if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase() == "trackpoint") {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile)).addTrack(trackpointSegmentName, pointList, new ArrayList<>());
if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase().equals("trackpoint")) {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile, context.getJobId())).addTrack(trackpointSegmentName, pointList, new ArrayList<>());
}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -418,10 +421,9 @@ public final class LeappFileProcessor {
}
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact.",
"LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact."
})
private void createRoute (Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
private void createRoute(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
Double startLatitude = Double.valueOf(0);
Double startLongitude = Double.valueOf(0);
@ -433,9 +435,9 @@ public final class LeappFileProcessor {
Long dateTime = Long.valueOf(0);
Collection<BlackboardAttribute> otherAttributes = new ArrayList<>();
String sourceFile = null;
AbstractFile absFile = null;
AbstractFile absFile;
String comment = "";
try {
for (BlackboardAttribute bba : bbattributes) {
switch (bba.getAttributeType().getTypeName()) {
@ -478,19 +480,17 @@ public final class LeappFileProcessor {
GeoWaypoints waypointList = new GeoWaypoints();
waypointList.addPoint(new Waypoint(startLatitude, startLongitude, zeroValue, ""));
waypointList.addPoint(new Waypoint(endLatitude, endLongitude, zeroValue, locationName));
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addRoute(destinationName, dateTime, waypointList, new ArrayList<>());
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addRoute(destinationName, dateTime, waypointList, new ArrayList<>());
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_waypoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
}
}
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact.",
})
}
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact."
})
private AbstractFile createTrackpoint(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName, String trackpointSegmentName, GeoTrackPoints pointList) throws IngestModuleException {
Double latitude = Double.valueOf(0);
@ -503,7 +503,7 @@ public final class LeappFileProcessor {
String sourceFile = null;
String comment = null;
AbstractFile absFile = null;
try {
for (BlackboardAttribute bba : bbattributes) {
switch (bba.getAttributeType().getTypeName()) {
@ -538,29 +538,24 @@ public final class LeappFileProcessor {
if (absFile == null) {
absFile = (AbstractFile) dataSource;
}
if ((trackpointSegmentName == null) || (trackpointSegmentName == segmentName)) {
trackpointSegmentName = segmentName;
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
if ((trackpointSegmentName == null) || (trackpointSegmentName.equals(segmentName))) {
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
} else {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addTrack(segmentName, pointList, new ArrayList<>());
trackpointSegmentName = segmentName;
pointList = new GeoTrackPoints();
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addTrack(segmentName, pointList, new ArrayList<>());
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_trackpoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
}
return absFile;
return absFile;
}
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship.",
"LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship."
})
private void createMessageRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
String messageType = null;
@ -577,7 +572,7 @@ public final class LeappFileProcessor {
List<BlackboardAttribute> otherAttributes = new ArrayList<>();
List<FileAttachment> fileAttachments = new ArrayList<>();
String sourceFile = null;
MessageAttachments messageAttachments = null;
MessageAttachments messageAttachments;
try {
for (BlackboardAttribute bba : bbattributes) {
@ -614,7 +609,7 @@ public final class LeappFileProcessor {
sourceFile = bba.getValueString();
break;
case "TSK_READ_STATUS":
if (bba.getValueInt() == 1 ) {
if (bba.getValueInt() == 1) {
messageStatus = MessageReadStatus.READ;
} else {
messageStatus = MessageReadStatus.UNREAD;
@ -638,22 +633,22 @@ public final class LeappFileProcessor {
AbstractFile absFile = findAbstractFile(dataSource, sourceFile);
if (absFile == null) {
absFile = (AbstractFile) dataSource;
}
CommunicationArtifactsHelper accountArtifact;
Account.Type accountType = getAccountType(fileName);
if (alternateId == null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType);
} else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId);
}
BlackboardArtifact messageArtifact = accountArtifact.addMessage(messageType, communicationDirection, senderId,
receipentId, dateTime, messageStatus, subject,
messageText, threadId, otherAttributes);
CommunicationArtifactsHelper accountHelper;
Account.Type accountType = getAccountType(fileName);
if (alternateId == null) {
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, context.getJobId());
} else {
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
}
BlackboardArtifact messageArtifact = accountHelper.addMessage(messageType, communicationDirection, senderId,
receipentId, dateTime, messageStatus, subject,
messageText, threadId, otherAttributes);
if (!fileAttachments.isEmpty()) {
messageAttachments = new MessageAttachments(fileAttachments, new ArrayList<>());
accountArtifact.addAttachments(messageArtifact, messageAttachments);
accountHelper.addAttachments(messageArtifact, messageAttachments);
}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -662,7 +657,7 @@ public final class LeappFileProcessor {
}
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship.",
"LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship."
})
private void createContactRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
@ -715,16 +710,16 @@ public final class LeappFileProcessor {
}
Account.Type accountType = getAccountType(fileName);
if (accountType != null) {
CommunicationArtifactsHelper accountArtifact;
CommunicationArtifactsHelper accountHelper;
if (alternateId == null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, context.getJobId());
} else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
}
BlackboardArtifact messageArtifact = accountArtifact.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes);
BlackboardArtifact messageArtifact = accountHelper.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes);
}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_contact_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -732,14 +727,13 @@ public final class LeappFileProcessor {
}
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship.",
"LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship."
})
private void createCalllogRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
String callerId = null;
String alternateId = null;
List<String> calleeId = Arrays.asList();
List<String> calleeId = Arrays.asList();
CommunicationDirection communicationDirection = CommunicationDirection.UNKNOWN;
Long startDateTime = Long.valueOf(0);
Long endDateTime = Long.valueOf(0);
@ -751,14 +745,14 @@ public final class LeappFileProcessor {
for (BlackboardAttribute bba : bbattributes) {
switch (bba.getAttributeType().getTypeName()) {
case "TSK_TEXT_FILE":
sourceFile = bba.getValueString();
break;
sourceFile = bba.getValueString();
break;
case "TSK_DATETIME_START":
startDateTime = bba.getValueLong();
break;
startDateTime = bba.getValueLong();
break;
case "TSK_DATETIME_END":
startDateTime = bba.getValueLong();
break;
endDateTime = bba.getValueLong();
break;
case "TSK_DIRECTION":
if (bba.getValueString().toLowerCase().equals("outgoing")) {
communicationDirection = CommunicationDirection.OUTGOING;
@ -773,8 +767,8 @@ public final class LeappFileProcessor {
break;
case "TSK_PHONE_NUMBER_TO":
if (!bba.getValueString().isEmpty()) {
String [] calleeTempList = bba.getValueString().split(",", 0);
calleeId = Arrays.asList(calleeTempList);
String[] calleeTempList = bba.getValueString().split(",", 0);
calleeId = Arrays.asList(calleeTempList);
}
break;
case "TSK_ID":
@ -786,32 +780,32 @@ public final class LeappFileProcessor {
break;
}
}
if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING) {
String [] calleeTempList = callerId.split(",", 0);
calleeId = Arrays.asList(calleeTempList);
callerId = null;
}
if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING && callerId != null) {
String[] calleeTempList = callerId.split(",", 0);
calleeId = Arrays.asList(calleeTempList);
callerId = null;
}
AbstractFile absFile = findAbstractFile(dataSource, sourceFile);
if (absFile == null) {
absFile = (AbstractFile) dataSource;
}
Account.Type accountType = getAccountType(fileName);
CommunicationArtifactsHelper accountArtifact;
CommunicationArtifactsHelper accountHelper;
if (accountType != null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, context.getJobId());
} else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
}
BlackboardArtifact callLogArtifact = accountArtifact.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes);
accountHelper.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes);
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_calllog_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
}
}
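A worked example of the caller/callee fallback above (values are illustrative): if TSK_DIRECTION is "outgoing", no TSK_PHONE_NUMBER_TO value is present, and callerId is "555-0100,555-0101", then calleeId becomes ["555-0100", "555-0101"] and callerId is set to null. That is, when an outgoing call's recipients were recorded in the caller column, that column is reinterpreted as the callee list before addCalllog is invoked.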
private Account.Type getAccountType(String AccountTypeName) {
switch (AccountTypeName.toLowerCase()) {
case "zapya.tsv":
@ -849,7 +843,7 @@ public final class LeappFileProcessor {
case "whatsapp - contacts.tsv":
return Account.Type.WHATSAPP;
case "tangomessages messages.tsv":
return Account.Type.TANGO;
return Account.Type.TANGO;
case "shareit file transfer.tsv":
return Account.Type.SHAREIT;
case "line - calllogs.tsv":
@ -880,20 +874,22 @@ public final class LeappFileProcessor {
return Account.Type.PHONE;
}
}
/**
* Process the line read and create the necessary attributes for it.
*
* @param lineValues List of column values.
* @param lineValues List of column values.
* @param columnIndexes Mapping of column headers (trimmed; to lower case)
* to column index. All header columns and only all header columns should be
* present.
* @param attrList The list of attributes as specified for the schema of
* this file.
* @param fileName The name of the file being processed.
* @param lineNum The line number in the file.
* to column index. All header columns and only all
* header columns should be present.
* @param attrList The list of attributes as specified for the schema
* of this file.
* @param fileName The name of the file being processed.
* @param lineNum The line number in the file.
*
* @return The collection of blackboard attributes for the artifact created
* from this line.
* from this line.
*
* @throws IngestModuleException
*/
private Collection<BlackboardAttribute> processReadLine(List<String> lineValues, Map<String, Integer> columnIndexes,
@ -930,7 +926,7 @@ public final class LeappFileProcessor {
String formattedValue = formatValueBasedOnAttrType(colAttr, value);
BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), formattedValue, fileName);
BlackboardAttribute attr = getAttribute(colAttr.getAttributeType(), formattedValue, fileName);
if (attr == null) {
logger.log(Level.WARNING, String.format("Blackboard attribute could not be parsed column %s at line %d in file %s. Omitting row.", colAttr.getColumnName(), lineNum, fileName));
return Collections.emptyList();
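As a concrete illustration of the columnIndexes contract documented above, a header row would be mapped like this (a minimal sketch; TsvHeaderMapper is a hypothetical helper, not part of the module):

    import java.util.HashMap;
    import java.util.Map;

    final class TsvHeaderMapper {

        /**
         * Map each header of a tab-separated header line, trimmed and
         * lower-cased, to its column index.
         */
        static Map<String, Integer> mapHeaders(String headerLine) {
            Map<String, Integer> columnIndexes = new HashMap<>();
            String[] headers = headerLine.split("\\t");
            for (int i = 0; i < headers.length; i++) {
                columnIndexes.put(headers[i].trim().toLowerCase(), i);
            }
            return columnIndexes;
        }
    }

For example, the header line "Source File\tTimestamp" yields {"source file"=0, "timestamp"=1}, satisfying the "all header columns and only all header columns" requirement.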
@ -949,9 +945,10 @@ public final class LeappFileProcessor {
* Check type of attribute and possibly format string based on it.
*
* @param colAttr Column Attribute information
* @param value string to be formatted
* @param value string to be formatted
*
* @return formatted string based on attribute type if no attribute type
* found then return original string
* found then return original string
*/
private String formatValueBasedOnAttrType(TsvColumn colAttr, String value) {
if (colAttr.getAttributeType().getTypeName().equals("TSK_DOMAIN")) {
@ -971,9 +968,10 @@ public final class LeappFileProcessor {
* value.
*
* @param attrType The attribute type.
* @param value The string value to be converted to the appropriate data
* type for the attribute type.
* @param value The string value to be converted to the appropriate data
* type for the attribute type.
* @param fileName The file name that the value comes from.
*
* @return The generated blackboard attribute.
*/
private BlackboardAttribute getAttribute(BlackboardAttribute.Type attrType, String value, String fileName) {
@ -998,7 +996,7 @@ public final class LeappFileProcessor {
(v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v).longValue()));
case DOUBLE:
return parseAttrValue(value.trim(), attrType, fileName, true, false,
(v) -> new BlackboardAttribute(attrType, moduleName, (double) Double.valueOf(v)));
(v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v)));
case BYTE:
return parseAttrValue(value.trim(), attrType, fileName, true, false,
(v) -> new BlackboardAttribute(attrType, moduleName, new byte[]{Byte.valueOf(v)}));
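A few worked conversions implied by the lambdas above (illustrative values only):

    // "3.7" with value type LONG   -> attribute holding 3L
    //                                 (parsed as a Double, then truncated by longValue())
    // "3.7" with value type DOUBLE -> attribute holding 3.7
    // "12"  with value type BYTE   -> attribute holding new byte[]{12}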
@ -1022,7 +1020,9 @@ public final class LeappFileProcessor {
* Handles converting a string value to a blackboard attribute.
*
* @param orig The original string value.
*
* @return The generated blackboard attribute.
*
* @throws ParseException
* @throws NumberFormatException
*/
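For reference, the converter interface this javadoc documents can be sketched as follows, grounded in the visible call valueConverter.apply(sanitizedValue) (in the real class it is a nested interface; shown standalone here and possibly differing in modifiers):

    @FunctionalInterface
    interface ParseExceptionFunction {

        /** Converts a raw string value into a blackboard attribute. */
        BlackboardAttribute apply(String orig) throws java.text.ParseException, NumberFormatException;
    }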
@ -1033,36 +1033,41 @@ public final class LeappFileProcessor {
* Runs parsing function on string value to convert to right data type and
* generates a blackboard attribute for that converted data type.
*
* @param value The string value.
* @param attrType The blackboard attribute type.
* @param fileName The name of the file from which the value comes.
* @param blankIsNull If string is blank return null attribute.
* @param zeroIsNull If string is some version of 0, return null attribute.
* @param value The string value.
* @param attrType The blackboard attribute type.
* @param fileName The name of the file from which the value comes.
* @param blankIsNull If string is blank return null attribute.
* @param zeroIsNull If string is some version of 0, return null
* attribute.
* @param valueConverter The means of converting the string value to an
* appropriate blackboard attribute.
* appropriate blackboard attribute.
*
* @return The generated blackboard attribute or null if not determined.
*/
private BlackboardAttribute parseAttrValue(String value, BlackboardAttribute.Type attrType, String fileName, boolean blankIsNull, boolean zeroIsNull, ParseExceptionFunction valueConverter) {
// remove non-printable characters from tsv input
// https://stackoverflow.com/a/6199346
value = value.replaceAll("\\p{C}", "");
String sanitizedValue = value.replaceAll("\\p{C}", "");
if (blankIsNull && StringUtils.isBlank(value)) {
if (blankIsNull && StringUtils.isBlank(sanitizedValue)) {
return null;
}
if (zeroIsNull && value.matches("^\\s*[0\\.]*\\s*$")) {
if (zeroIsNull && sanitizedValue.matches("^\\s*[0\\.]*\\s*$")) {
return null;
}
try {
return valueConverter.apply(value);
return valueConverter.apply(sanitizedValue);
} catch (NumberFormatException | ParseException ex) {
logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", value, attrType.getValueType().getLabel(), fileName), ex);
logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", sanitizedValue, attrType.getValueType().getLabel(), fileName), ex);
return null;
}
}
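To make the blank/zero semantics concrete, here is a hypothetical call (attrType and moduleName as in the surrounding class):

    // Illustrative only: a blank value with blankIsNull=true yields null,
    // "0" or "0.0" with zeroIsNull=true yields null, and a parse failure is
    // logged and yields null; otherwise the converter builds the attribute.
    BlackboardAttribute attr = parseAttrValue("42", attrType, "example.tsv",
            true /* blankIsNull */, true /* zeroIsNull */,
            (v) -> new BlackboardAttribute(attrType, moduleName, Long.valueOf(v)));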
/**
* Read the XML config file and load the mappings into maps
*/
@NbBundle.Messages({
"LeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file.",
"LeappFileProcessor.cannotBuildXmlParser=Cannot buld an XML parser.",
@ -1070,10 +1075,6 @@ public final class LeappFileProcessor {
"LeappFileProcessor.postartifacts_error=Error posting Blackboard Artifact",
"LeappFileProcessor.error.creating.new.artifacts=Error creating new artifacts."
})
/**
* Read the XML config file and load the mappings into maps
*/
private void loadConfigFile() throws IngestModuleException {
Document xmlinput;
try {
@ -1120,7 +1121,7 @@ public final class LeappFileProcessor {
BlackboardArtifact.Type foundArtifactType = null;
try {
foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getArtifactType(artifactName);
foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getArtifactType(artifactName);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch artifact type for %s.", artifactName), ex);
}
@ -1157,7 +1158,7 @@ public final class LeappFileProcessor {
for (int k = 0; k < attributeNlist.getLength(); k++) {
NamedNodeMap nnm = attributeNlist.item(k).getAttributes();
String attributeName = nnm.getNamedItem("attributename").getNodeValue();
if (!attributeName.toLowerCase().matches("null")) {
String columnName = nnm.getNamedItem("columnName").getNodeValue();
String required = nnm.getNamedItem("required").getNodeValue();
@ -1165,7 +1166,7 @@ public final class LeappFileProcessor {
BlackboardAttribute.Type foundAttrType = null;
try {
foundAttrType = Case.getCurrentCase().getSleuthkitCase().getAttributeType(attributeName.toUpperCase());
foundAttrType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getAttributeType(attributeName.toUpperCase());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch attribute type for %s.", attributeName), ex);
}
@ -1181,10 +1182,13 @@ public final class LeappFileProcessor {
if (columnName == null) {
logger.log(Level.SEVERE, String.format("No column name provided for [%s]", getXmlAttrIdentifier(parentName, attributeName)));
continue;
} else if (columnName.trim().length() != columnName.length()) {
logger.log(Level.SEVERE, String.format("Column name '%s' starts or ends with whitespace for [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName)));
continue;
} else if (columnName.matches(".*[^ \\S].*")) {
logger.log(Level.SEVERE, String.format("Column name '%s' contains invalid characters [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName)));
continue;
}
TsvColumn thisCol = new TsvColumn(
@ -1209,11 +1213,12 @@ public final class LeappFileProcessor {
/**
* Generic method for creating a blackboard artifact with attributes
*
* @param artType The artifact type.
* @param dataSource is the Content object that needs to have the artifact
* added for it
* @param artType The artifact type.
* @param dataSource is the Content object that needs to have the artifact
* added for it
* @param bbattributes is the collection of blackboard attributes that need
* to be added to the artifact after the artifact has been created
* to be added to the artifact after the artifact has
* been created
*
* @return The newly-created artifact, or null on error
*/
@ -1225,7 +1230,7 @@ public final class LeappFileProcessor {
case ANALYSIS_RESULT:
return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN, null, null, null, bbattributes).getAnalysisResult();
default:
logger.log(Level.SEVERE, "Unknown category type: " + artType.getCategory().getDisplayName());
logger.log(Level.SEVERE, String.format("Unknown category type: %s", artType.getCategory().getDisplayName()));
return null;
}
} catch (TskException ex) {
@ -1238,7 +1243,7 @@ public final class LeappFileProcessor {
* Method to post a list of BlackboardArtifacts to the blackboard.
*
* @param artifacts A list of artifacts. If the list is empty or null, the
* function will return.
* function will return.
*/
void postArtifacts(Collection<BlackboardArtifact> artifacts) {
if (artifacts == null || artifacts.isEmpty()) {
@ -1246,7 +1251,7 @@ public final class LeappFileProcessor {
}
try {
Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName);
Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, Bundle.LeappFileProcessor_postartifacts_error(), ex); //NON-NLS
}
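A hypothetical usage of this batched post from inside the processor, matching the contract above:

    Collection<BlackboardArtifact> artifacts = new ArrayList<>();
    // ... add artifacts created while processing a tsv file ...
    postArtifacts(artifacts); // a null or empty collection is a silent no-op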
@ -1259,7 +1264,7 @@ public final class LeappFileProcessor {
*/
private void configExtractor() throws IOException {
PlatformUtil.extractResourceToUserConfigDir(LeappFileProcessor.class,
xmlFile, true);
xmlFile, true);
}
private static final Set<String> ALLOWED_EXTENSIONS = new HashSet<>(Arrays.asList("zip", "tar", "tgz"));
@ -1316,14 +1321,14 @@ public final class LeappFileProcessor {
}
}
private AbstractFile findAbstractFile(Content dataSource, String fileNamePath) {
if (fileNamePath == null) {
return null;
}
List<AbstractFile> files;
String fileName = FilenameUtils.getName(fileNamePath);
String filePath = FilenameUtils.normalize(FilenameUtils.getPath(fileNamePath), true);
@ -1347,4 +1352,4 @@ public final class LeappFileProcessor {
return null;
}
}
}

View File

@ -172,7 +172,7 @@ public class EXIFProcessor implements PictureProcessor {
artifacts.add(userSuspectedArtifact);
try {
blackboard.postArtifacts(artifacts, MODULE_NAME);
blackboard.postArtifacts(artifacts, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Error posting TSK_METADATA_EXIF and TSK_USER_CONTENT_SUSPECTED artifacts for %s (object ID = %d)", file.getName(), file.getId()), ex); //NON-NLS
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -352,7 +352,7 @@ public class PlasoIngestModule implements DataSourceIngestModule {
* keyword search, and fire an event to notify UI of
* this new artifact
*/
blackboard.postArtifact(bbart, MODULE_NAME);
blackboard.postArtifact(bbart, MODULE_NAME, context.getJobId());
} catch (BlackboardException ex) {
logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -159,7 +159,7 @@ public class YaraIngestModule extends FileIngestModuleAdapter {
if(!artifacts.isEmpty()) {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName());
blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName(), context.getJobId());
}
} catch (BlackboardException | NoCurrentCaseException | IngestModuleException | TskCoreException | YaraWrapperException ex) {

View File

@ -45,7 +45,6 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.report.ReportProgressPanel;
import static org.sleuthkit.autopsy.casemodule.services.TagsManager.getNotableTagLabel;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
@ -358,7 +357,7 @@ class TableReportGenerator {
// Give the modules the rows for the content tags.
for (ContentTag tag : tags) {
try {
if(shouldFilterFromReport(tag.getContent())) {
if (shouldFilterFromReport(tag.getContent())) {
continue;
}
} catch (TskCoreException ex) {
@ -366,7 +365,7 @@ class TableReportGenerator {
logger.log(Level.SEVERE, "Failed to access content data from the case database.", ex); //NON-NLS
return;
}
// skip tags that we are not reporting on
String notableString = tag.getName().getKnownStatus() == TskData.FileKnown.BAD ? TagsManager.getNotableTagLabel() : "";
if (passesTagNamesFilter(tag.getName().getDisplayName() + notableString) == false) {
@ -451,15 +450,15 @@ class TableReportGenerator {
// Give the modules the rows for the content tags.
for (BlackboardArtifactTag tag : tags) {
try {
if(shouldFilterFromReport(tag.getContent())) {
if (shouldFilterFromReport(tag.getContent())) {
continue;
}
} catch (TskCoreException ex) {
} catch (TskCoreException ex) {
errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedGetBBArtifactTags"));
logger.log(Level.SEVERE, "Failed to access content data from the case database.", ex); //NON-NLS
return;
}
String notableString = tag.getName().getKnownStatus() == TskData.FileKnown.BAD ? TagsManager.getNotableTagLabel() : "";
if (passesTagNamesFilter(tag.getName().getDisplayName() + notableString) == false) {
continue;
@ -813,7 +812,7 @@ class TableReportGenerator {
AbstractFile f = openCase.getSleuthkitCase().getAbstractFileById(objId);
if (f != null) {
uniquePath = openCase.getSleuthkitCase().getAbstractFileById(objId).getUniquePath();
if(shouldFilterFromReport(f)) {
if (shouldFilterFromReport(f)) {
continue;
}
}
@ -973,7 +972,7 @@ class TableReportGenerator {
AbstractFile f = openCase.getSleuthkitCase().getAbstractFileById(objId);
if (f != null) {
uniquePath = openCase.getSleuthkitCase().getAbstractFileById(objId).getUniquePath();
if(shouldFilterFromReport(f)) {
if (shouldFilterFromReport(f)) {
continue;
}
}
@ -1217,11 +1216,11 @@ class TableReportGenerator {
private List<ArtifactData> getFilteredArtifacts(BlackboardArtifact.Type type, HashSet<String> tagNamesFilter) {
List<ArtifactData> artifacts = new ArrayList<>();
try {
for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardArtifacts(type.getTypeID())) {
if(shouldFilterFromReport(artifact)) {
for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getArtifacts(Collections.singletonList(type), settings.getSelectedDataSources())) {
if (shouldFilterFromReport(artifact)) {
continue;
}
List<BlackboardArtifactTag> tags = Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact);
HashSet<String> uniqueTagNames = new HashSet<>();
for (BlackboardArtifactTag tag : tags) {
@ -1232,7 +1231,7 @@ class TableReportGenerator {
continue;
}
try {
artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardAttributes(artifact), uniqueTagNames));
artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getBlackboardAttributes(artifact), uniqueTagNames));
} catch (TskCoreException ex) {
errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedGetBBAttribs"));
logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex); //NON-NLS
@ -1339,7 +1338,7 @@ class TableReportGenerator {
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH)));
columns.add(new AttributeColumn(NbBundle.getMessage(this.getClass(), "ReportGenerator.artTableColHdr.dateTime"),
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED )));
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED)));
attributeTypeSet.remove(new Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID));
} else if (BlackboardArtifact.ARTIFACT_TYPE.TSK_INSTALLED_PROG.getTypeID() == artifactTypeId) {
@ -1817,19 +1816,19 @@ class TableReportGenerator {
return "";
}
/**
* Indicates if the content should be filtered from the report.
*/
private boolean shouldFilterFromReport(Content content) throws TskCoreException {
if(this.settings.getSelectedDataSources() == null) {
if (this.settings.getSelectedDataSources() == null) {
return false;
}
if (content.getDataSource() == null) {
return false;
}
long dataSourceId = content.getDataSource().getId();
return !this.settings.getSelectedDataSources().contains(dataSourceId);
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -354,7 +354,7 @@ public class PortableCaseReportModule implements ReportModule {
}
for (BlackboardAttribute.ATTRIBUTE_TYPE type : BlackboardAttribute.ATTRIBUTE_TYPE.values()) {
try {
oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getAttributeType(type.getLabel()));
oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getBlackboard().getAttributeType(type.getLabel()));
} catch (TskCoreException ex) {
handleError("Error looking up attribute name " + type.getLabel(),
Bundle.PortableCaseReportModule_generateReport_errorLookingUpAttrType(type.getLabel()),
@ -1084,7 +1084,7 @@ public class PortableCaseReportModule implements ReportModule {
return oldArtTypeIdToNewArtTypeId.get(oldArtifact.getArtifactTypeID());
}
BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getArtifactType(oldArtifact.getArtifactTypeName());
BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getBlackboard().getArtifactType(oldArtifact.getArtifactTypeName());
try {
BlackboardArtifact.Type newCustomType = portableSkCase.getBlackboard().getOrAddArtifactType(oldCustomType.getTypeName(), oldCustomType.getDisplayName());
oldArtTypeIdToNewArtTypeId.put(oldArtifact.getArtifactTypeID(), newCustomType.getTypeID());
@ -1424,7 +1424,7 @@ public class PortableCaseReportModule implements ReportModule {
// Add the attachment. The account type specified in the constructor will not be used.
CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(currentCase.getSleuthkitCase(),
newSourceStr, newFile, Account.Type.EMAIL);
newSourceStr, newFile, Account.Type.EMAIL, null);
communicationArtifactsHelper.addAttachments(newArtifact, new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments()));
}
catch (BlackboardJsonAttrUtil.InvalidJsonException ex) {

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2017-2020 Basis Technology Corp.
* Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -23,7 +23,6 @@ import java.util.List;
import javax.xml.bind.DatatypeConverter;
import org.joda.time.DateTime;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -87,6 +86,7 @@ final class CustomArtifactType {
* blackboard.
*
* @param source The artifact source content.
* @param ingestJobId The ingest job ID.
*
* @return A BlackboardArtifact object.
*
@ -95,7 +95,7 @@ final class CustomArtifactType {
* @throws Blackboard.BlackboardException If there is an error posting the
* artifact to the blackboard.
*/
static BlackboardArtifact createAndPostInstance(Content source) throws TskCoreException, Blackboard.BlackboardException {
static BlackboardArtifact createAndPostInstance(Content source, long ingestJobId) throws TskCoreException, Blackboard.BlackboardException {
List<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(intAttrType, MODULE_NAME, 0));
attributes.add(new BlackboardAttribute(doubleAttrType, MODULE_NAME, 0.0));
@ -131,7 +131,7 @@ final class CustomArtifactType {
}
Blackboard blackboard = Case.getCurrentCase().getServices().getArtifactsBlackboard();
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, ingestJobId);
return artifact;
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2017-2020 Basis Technology Corp.
* Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -38,7 +38,8 @@ import org.sleuthkit.datamodel.TskCoreException;
public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceIngestModuleAdapter {
private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorDataSourceIngestModule.class.getName());
private IngestJobContext context;
/**
* Adds the custom artifact type this module uses to the case database of
* the current case.
@ -51,6 +52,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge
*/
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
try {
CustomArtifactType.addToCaseDatabase();
} catch (Blackboard.BlackboardException ex) {
@ -70,7 +72,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
try {
CustomArtifactType.createAndPostInstance(dataSource);
CustomArtifactType.createAndPostInstance(dataSource, context.getJobId());
} catch (TskCoreException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Failed to process data source (obj_id = %d)", dataSource.getId()), ex);
return ProcessResult.ERROR;

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2017-2020 Basis Technology Corp.
* Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -37,6 +37,7 @@ import org.sleuthkit.datamodel.TskCoreException;
final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapter {
private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorFileIngestModule.class.getName());
private IngestJobContext context;
/**
* Adds the custom artifact type this module uses to the case database of
@ -50,6 +51,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt
*/
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
try {
CustomArtifactType.addToCaseDatabase();
} catch (Blackboard.BlackboardException ex) {
@ -71,7 +73,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt
return ProcessResult.OK;
}
try {
CustomArtifactType.createAndPostInstance(file);
CustomArtifactType.createAndPostInstance(file, context.getJobId());
} catch (TskCoreException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Failed to process file (obj_id = %d)", file.getId()), ex);
return ProcessResult.ERROR;

View File

@ -25,6 +25,7 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import javax.annotation.concurrent.GuardedBy;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.ingest.IngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestJobStartResult;
@ -53,7 +54,7 @@ public final class IngestJobRunner {
*/
public static List<IngestModuleError> runIngestJob(Collection<Content> dataSources, IngestJobSettings settings) throws InterruptedException {
Object ingestMonitor = new Object();
IngestJobCompletiontListener completiontListener = new IngestJobCompletiontListener(ingestMonitor);
IngestJobCompletionListener completiontListener = new IngestJobCompletionListener(ingestMonitor, dataSources.size());
IngestManager ingestManager = IngestManager.getInstance();
ingestManager.addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, completiontListener);
try {
@ -81,9 +82,12 @@ public final class IngestJobRunner {
* An ingest job event listener that allows IngestRunner.runIngestJob to
* block until the specified ingest job is completed.
*/
private static final class IngestJobCompletiontListener implements PropertyChangeListener {
private static final class IngestJobCompletionListener implements PropertyChangeListener {
private final Object ingestMonitor;
@GuardedBy("ingestMonitor")
private int remainingJobsCount;
/**
* Constructs an ingest job event listener that allows
@ -92,9 +96,11 @@ public final class IngestJobRunner {
*
* @param ingestMonitor A Java object to notify when all of the ingest jobs
* are completed.
* @param jobsCount The number of jobs to listen for before notifying the monitor.
*/
IngestJobCompletiontListener(Object ingestMonitor) {
IngestJobCompletionListener(Object ingestMonitor, int jobsCount) {
this.ingestMonitor = ingestMonitor;
this.remainingJobsCount = jobsCount;
}
/**
@ -109,7 +115,10 @@ public final class IngestJobRunner {
String eventType = event.getPropertyName();
if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
synchronized (ingestMonitor) {
ingestMonitor.notify();
this.remainingJobsCount--;
if (this.remainingJobsCount <= 0) {
ingestMonitor.notify();
}
}
}
}
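The hand-rolled countdown above can also be expressed with a standard primitive; this sketch (JobCompletionGate is a hypothetical name, not project code) shows the equivalent pattern using java.util.concurrent.CountDownLatch:

    import java.util.concurrent.CountDownLatch;

    final class JobCompletionGate {

        private final CountDownLatch remainingJobs;

        JobCompletionGate(int jobsCount) {
            this.remainingJobs = new CountDownLatch(jobsCount);
        }

        /** Call once per COMPLETED or CANCELLED ingest job event. */
        void jobFinished() {
            remainingJobs.countDown();
        }

        /** Blocks the caller until every job has reported. */
        void awaitAll() throws InterruptedException {
            remainingJobs.await();
        }
    }

The wait/notify form used by the listener keeps the class free of extra state objects and lets the remaining-jobs count live under the same monitor used for signaling, which is what the @GuardedBy("ingestMonitor") annotation documents.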

View File

@ -157,7 +157,7 @@ public class AddManualEvent extends Action {
BlackboardArtifact artifact = eventInfo.datasource.newDataArtifact(new BlackboardArtifact.Type(TSK_TL_EVENT), attributes, null);
try {
sleuthkitCase.getBlackboard().postArtifact(artifact, source);
sleuthkitCase.getBlackboard().postArtifact(artifact, source, null);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait();

View File

@ -20,6 +20,7 @@
package org.sleuthkit.autopsy.commonpropertiessearch;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@ -88,6 +89,26 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
this.utils.tearDown();
}
/**
* Assert that the given file appears a precise number of times in the given
* data source; the test fails otherwise.
*
* @param searchDomain search domain
* @param objectIdToDataSourceMap mapping of file ids to data source names
* @param fileName name of the file to search for
* @param dataSource name of the data source where the file should
* appear
* @param instanceCount expected number of appearances of the given file
*/
static void assertInstanceExistenceAndCount(List<AbstractFile> searchDomain, Map<Long, String> objectIdToDataSourceMap, String fileName, String dataSource, int instanceCount) {
int foundCount = IntraCaseTestUtils.getInstanceCount(searchDomain, objectIdToDataSourceMap, fileName, dataSource);
String errorMessage = MessageFormat.format("Expected to find {0} matches for {1} in {2} but found {3}.", instanceCount, fileName, dataSource, foundCount);
assertEquals(errorMessage, instanceCount, foundCount);
}
/**
* Find all matches & all file types. Confirm file.jpg is found on all three
* and file.docx is found on two.
@ -103,25 +124,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = IntraCaseTestUtils.getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -144,25 +165,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -185,25 +206,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -227,25 +248,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -269,25 +290,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -311,25 +332,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -353,25 +374,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -394,25 +415,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
@ -435,25 +456,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);

View File

@ -179,6 +179,37 @@ class IntraCaseTestUtils {
return tally == instanceCount;
}
/**
* Count the number of times the given file appears in the given data
* source.
*
* @param searchDomain search domain
* @param objectIdToDataSourceMap mapping of file ids to data source names
* @param fileName name of file to search for
* @param dataSource name of data source where the file should appear
*
* @return The count of matching instances found.
*/
static int getInstanceCount(List<AbstractFile> searchDomain, Map<Long, String> objectIdToDataSourceMap, String fileName, String dataSource) {
int tally = 0;
for (AbstractFile file : searchDomain) {
Long objectId = file.getId();
String name = file.getName();
String dataSourceName = objectIdToDataSourceMap.get(objectId);
if (name.equalsIgnoreCase(fileName) && dataSourceName.equalsIgnoreCase(dataSource)) {
tally++;
}
}
return tally;
}
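For reference, the assertInstanceExistenceAndCount assertion that replaces the assertTrue(verifyInstanceExistanceAndCount(...)) pattern above presumably wraps this counter; a minimal sketch, assuming a JUnit-style assertEquals is in scope (only the name comes from the diff, the body is an assumption):
static void assertInstanceExistenceAndCount(List<AbstractFile> searchDomain, Map<Long, String> objectIdToDataSourceMap,
        String fileName, String dataSource, int instanceCount) {
    // Fails with expected/actual counts rather than a bare boolean.
    assertEquals(instanceCount, getInstanceCount(searchDomain, objectIdToDataSourceMap, fileName, dataSource));
}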
/**
* Convenience method which verifies that a file exists within a given data
* source exactly once.

View File

@ -46,8 +46,8 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Person;
import org.sleuthkit.datamodel.Pool;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Score;
@ -114,6 +114,12 @@ public class TableSearchTest extends NbTestCase {
private static final String SOURCE_NAME_COLUMN = "Source Name";
private static final String SOURCE_FILE_PATH_COLUMN = "Source File Path";
// File system test
private static final String PERSON_NAME = "Person1";
private static final String PERSON_HOST_NAME1 = "Host for Person A";
private static final String PERSON_HOST_NAME2 = "Host for Person B";
/////////////////////////////////////////////////
// Data to be used across the test methods.
// These are initialized in setUpCaseDatabase().
@ -160,6 +166,8 @@ public class TableSearchTest extends NbTestCase {
Image fsTestImageB = null; // Another image
Volume fsTestVolumeB1 = null; // Another volume
Pool fsTestPoolB = null; // A pool
Person person1 = null; // A person
Host personHost1 = null; // A host belonging to the above person
// Tags test
TagName knownTag1 = null;
@ -435,6 +443,12 @@ public class TableSearchTest extends NbTestCase {
trans.commit();
trans = null;
// Create a person associated with two hosts
person1 = db.getPersonManager().newPerson(PERSON_NAME);
personHost1 = db.getHostManager().newHost(PERSON_HOST_NAME1);
Host personHost2 = db.getHostManager().newHost(PERSON_HOST_NAME2);
db.getPersonManager().addHostsToPerson(person1, Arrays.asList(personHost1, personHost2));
// Add tags ----
knownTag1 = tagsManager.addTagName("Tag 1", TAG_DESCRIPTION, TagName.HTML_COLOR.RED, TskData.FileKnown.KNOWN);
@ -987,9 +1001,25 @@ public class TableSearchTest extends NbTestCase {
try {
FileSystemDAO fileSystemDAO = MainDAO.getInstance().getFileSystemDAO();
// There are 4 hosts not associated with a person
FileSystemPersonSearchParam personParam = new FileSystemPersonSearchParam(null);
BaseSearchResultsDTO results = fileSystemDAO.getHostsForTable(personParam, 0, null, false);
assertEquals(4, results.getTotalResultsCount());
assertEquals(4, results.getItems().size());
// Person1 is associated with two hosts
personParam = new FileSystemPersonSearchParam(person1.getPersonId());
results = fileSystemDAO.getHostsForTable(personParam, 0, null, false);
assertEquals(2, results.getTotalResultsCount());
assertEquals(2, results.getItems().size());
// Check that the name of the first host is present
RowDTO row = results.getItems().get(0);
assertTrue(row.getCellValues().contains(PERSON_HOST_NAME1));
// HostA is associated with two images
FileSystemHostSearchParam hostParam = new FileSystemHostSearchParam(fsTestHostA.getHostId());
BaseSearchResultsDTO results = fileSystemDAO.getContentForTable(hostParam, 0, null, false);
results = fileSystemDAO.getContentForTable(hostParam, 0, null, false);
assertEquals(2, results.getTotalResultsCount());
assertEquals(2, results.getItems().size());
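As the comments in this hunk note, a FileSystemPersonSearchParam built with null selects the hosts that have no person association; a minimal sketch of the two query shapes used above (names taken from the surrounding test code):
// Hosts with no person association:
BaseSearchResultsDTO unassigned = fileSystemDAO.getHostsForTable(
        new FileSystemPersonSearchParam(null), 0, null, false);
// Hosts belonging to a specific person:
BaseSearchResultsDTO forPerson = fileSystemDAO.getHostsForTable(
        new FileSystemPersonSearchParam(person1.getPersonId()), 0, null, false);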

View File

@ -143,7 +143,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
private static final int COMPLETED_TIME_COL_PREFERRED_WIDTH = 280;
private static final String UPDATE_TASKS_THREAD_NAME = "AID-update-tasks-%d";
private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName();
private static final String RUNNING_AS_SERVICE_PROPERTY = "autoingest.runningasservice";
private static final String RUNNING_AS_SERVICE_PROPERTY = "autoingest.runningasservice";
private static final Logger sysLogger = AutoIngestSystemLogger.getLogger();
private static AutoIngestControlPanel instance;
private final DefaultTableModel pendingTableModel;
@ -160,7 +160,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
* Maintain a mapping of each service to its last status update.
*/
private final ConcurrentHashMap<String, String> statusByService;
/*
* The enum is used in conjunction with the DefaultTableModel class to
* provide table models for the JTables used to display a view of the
@ -177,7 +177,8 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime=Job Completed",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage",
"# {0} - unitSeparator",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder=Case Folder",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob= Local Job?",
@ -193,7 +194,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
STARTED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime")),
COMPLETED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime")),
STAGE(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage")),
STAGE_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime")),
STAGE_TIME(Bundle.AutoIngestControlPanel_JobsTableModel_ColumnHeader_StageTime(DurationCellRenderer.getUnitSeperator())),
STATUS(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status")),
CASE_DIRECTORY_PATH(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder")),
IS_LOCAL_JOB(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob")),
@ -250,7 +251,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
* controlling automated ingest for a single node within the cluster.
*/
private AutoIngestControlPanel() {
this.statusByService = new ConcurrentHashMap<>();
//Disable the main window so the user can only use the dashboard (if we used setVisible the taskbar icon would go away)
@ -290,10 +291,10 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
* Update status of the services on the dashboard
*/
private void displayServicesStatus() {
tbServicesStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message",
statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
tbServicesStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message",
statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
statusByService.get(ServicesMonitor.Service.MESSAGING.toString())));
String upStatus = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up");
if (statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()).compareTo(upStatus) != 0
@ -304,7 +305,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
tbServicesStatusMessage.setForeground(Color.BLACK);
}
}
/**
* Queries the services monitor and sets the text for the services status
* text box.
@ -411,7 +412,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
column.setMaxWidth(PRIORITY_COLUMN_MAX_WIDTH);
column.setPreferredWidth(PRIORITY_COLUMN_PREFERRED_WIDTH);
column.setWidth(PRIORITY_COLUMN_PREFERRED_WIDTH);
column = pendingTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader());
column.setCellRenderer(new OcrIconCellRenderer());
column.setMaxWidth(OCR_COLUMN_MAX_WIDTH);
@ -469,7 +470,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader()));
runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader()));
runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader()));
/*
* Set up a column to display the cases associated with the jobs.
*/
@ -566,7 +567,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.CASE_DIRECTORY_PATH.getColumnHeader()));
completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader()));
completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader()));
/*
* Set up a column to display the cases associated with the jobs.
*/
@ -617,7 +618,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
column.setMaxWidth(STATUS_COL_MAX_WIDTH);
column.setPreferredWidth(STATUS_COL_PREFERRED_WIDTH);
column.setWidth(STATUS_COL_PREFERRED_WIDTH);
/*
* Set up a column to display OCR enabled/disabled flag.
*/
@ -732,30 +733,30 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
}
PropertyChangeListener propChangeListener = (PropertyChangeEvent evt) -> {
String serviceDisplayName = ServicesMonitor.Service.valueOf(evt.getPropertyName()).toString();
String status = evt.getNewValue().toString();
if (status.equals(ServicesMonitor.ServiceStatus.UP.toString())) {
status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up");
} else if (status.equals(ServicesMonitor.ServiceStatus.DOWN.toString())) {
status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down");
sysLogger.log(Level.SEVERE, "Connection to {0} is down", serviceDisplayName); //NON-NLS
}
// If the status update is for an existing service whose status hasn't changed, do nothing.
if (statusByService.containsKey(serviceDisplayName) && status.equals(statusByService.get(serviceDisplayName))) {
return;
}
statusByService.put(serviceDisplayName, status);
displayServicesStatus();
};
// Subscribe to all multi-user services in order to display their status
Set<String> servicesList = new HashSet<>();
servicesList.add(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString());
servicesList.add(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString());
servicesList.add(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString());
servicesList.add(ServicesMonitor.Service.MESSAGING.toString());
ServicesMonitor.getInstance().addSubscriber(servicesList, propChangeListener);
@ -879,7 +880,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
case JOB_COMPLETED:
case CASE_DELETED:
case REPROCESS_JOB:
case OCR_STATE_CHANGE:
case OCR_STATE_CHANGE:
updateExecutor.submit(new UpdateAllJobsTablesTask());
break;
case PAUSED_BY_USER_REQUEST:

View File

@ -53,7 +53,8 @@ final class AutoIngestJobsNode extends AbstractNode {
"AutoIngestJobsNode.dataSource.text=Data Source",
"AutoIngestJobsNode.hostName.text=Host Name",
"AutoIngestJobsNode.stage.text=Stage",
"AutoIngestJobsNode.stageTime.text=Time in Stage",
"# {0} - unitSeparator",
"AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss)",
"AutoIngestJobsNode.jobCreated.text=Job Created",
"AutoIngestJobsNode.jobCompleted.text=Job Completed",
"AutoIngestJobsNode.priority.text=Prioritized",
@ -345,8 +346,10 @@ final class AutoIngestJobsNode extends AbstractNode {
jobWrapper.getProcessingHostName()));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(),
status.getDescription()));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(),
DurationCellRenderer.longToDurationString((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime()))));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
DurationCellRenderer.longToDurationString(Date.from(Instant.now()).getTime() - status.getStartDate().getTime())));
break;
case COMPLETED_JOB:
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(),
@ -356,7 +359,7 @@ final class AutoIngestJobsNode extends AbstractNode {
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_status_text(), Bundle.AutoIngestJobsNode_status_text(), Bundle.AutoIngestJobsNode_status_text(),
jobWrapper.getErrorsOccurred() ? StatusIconCellRenderer.Status.WARNING : StatusIconCellRenderer.Status.OK));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_ocr_text(), Bundle.AutoIngestJobsNode_ocr_text(), Bundle.AutoIngestJobsNode_ocr_text(),
jobWrapper.getOcrEnabled()));
jobWrapper.getOcrEnabled()));
break;
default:
}
@ -377,7 +380,7 @@ final class AutoIngestJobsNode extends AbstractNode {
PrioritizationAction.DeprioritizeCaseAction deprioritizeCaseAction = new PrioritizationAction.DeprioritizeCaseAction(jobWrapper.getJob());
deprioritizeCaseAction.setEnabled(jobWrapper.getPriority() > 0);
actions.add(deprioritizeCaseAction);
actions.add(new AutoIngestAdminActions.EnableOCR(jobWrapper.getJob()));
AutoIngestAdminActions.DisableOCR disableOCRAction = new AutoIngestAdminActions.DisableOCR(jobWrapper.getJob());
disableOCRAction.setEnabled(jobWrapper.getOcrEnabled() == true);

View File

@ -31,6 +31,7 @@ import org.sleuthkit.autopsy.datamodel.EmptyNode;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.AutoIngestJobStatus;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.JobNode;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestNodeRefreshEvents.AutoIngestRefreshEvent;
import org.sleuthkit.autopsy.guiutils.DurationCellRenderer;
import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer;
/**
@ -64,6 +65,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
customize();
}
/**
* Set up the AutoIngestJobsPanel so that its outlineView displays the
* correct columns for the specified AutoIngestJobStatus.
@ -99,7 +102,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
outlineView.setPropertyColumns(Bundle.AutoIngestJobsNode_dataSource_text(), Bundle.AutoIngestJobsNode_dataSource_text(),
Bundle.AutoIngestJobsNode_hostName_text(), Bundle.AutoIngestJobsNode_hostName_text(),
Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(),
Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text());
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()));
indexOfColumn = getColumnIndexByName(Bundle.AutoIngestJobsNode_caseName_text());
if (indexOfColumn != INVALID_INDEX) {
outline.setColumnSorted(indexOfColumn, true, 1);
@ -124,7 +128,7 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
if (indexOfColumn != INVALID_INDEX) {
outline.getColumnModel().getColumn(indexOfColumn).setPreferredWidth(INITIAL_OCR_WIDTH);
outline.getColumnModel().getColumn(indexOfColumn).setCellRenderer(new OcrIconCellRenderer());
}
}
break;
default:
}
@ -177,8 +181,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
* Update the contents of this AutoIngestJobsPanel while retaining the
* currently selected node.
*
* @param refreshEvent - the AutoIngestRefreshEvent which will provide the new
* contents
* @param refreshEvent - the AutoIngestRefreshEvent which will provide the
* new contents
*/
void refresh(AutoIngestRefreshEvent refreshEvent) {
synchronized (this) {
@ -191,7 +195,6 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
}
outline.setRowSelectionAllowed(true);
outline.setFocusable(true);
}
}

View File

@ -78,7 +78,8 @@ AutoIngestControlPanel.JobsTableModel.ColumnHeader.ManifestFilePath=\ Manifest F
AutoIngestControlPanel.JobsTableModel.ColumnHeader.OCR=OCR
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Priority=Prioritized
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage
# {0} - unitSeparator
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status
AutoIngestControlPanel.OK=OK
@ -140,7 +141,8 @@ AutoIngestJobsNode.prioritized.false=No
AutoIngestJobsNode.prioritized.true=Yes
AutoIngestJobsNode.priority.text=Prioritized
AutoIngestJobsNode.stage.text=Stage
AutoIngestJobsNode.stageTime.text=Time in Stage
# {0} - unitSeparator
AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss)
AutoIngestJobsNode.status.text=Status
AutoIngestJobsPanel.waitNode.text=Please Wait...
AutoIngestMetricsDialog.initReportText=Select a date above and click the 'Generate Metrics Report' button to generate\na metrics report.
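For context, the {0} placeholder in the new StageTime keys is filled through the generated Bundle accessor, as the AutoIngestControlPanel hunk above shows; a minimal sketch of the call site (the ':' output is only an assumed example of the separator value):
// Produces e.g. "Time in Stage (dd:hh:mm:ss)" when the unit separator is ':'.
String header = Bundle.AutoIngestControlPanel_JobsTableModel_ColumnHeader_StageTime(
        DurationCellRenderer.getUnitSeperator()); // method name is spelled this way in the codebase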

View File

@ -1084,13 +1084,13 @@ final class FileExportRuleSet implements Serializable, Comparable<FileExportRule
SleuthkitCase caseDb = currentCase.getSleuthkitCase();
BlackboardArtifact.Type artifactType;
try {
artifactType = caseDb.getArtifactType(artifactTypeName);
artifactType = caseDb.getBlackboard().getArtifactType(artifactTypeName);
} catch (TskCoreException ex) {
throw new ExportRulesException(String.format("The specified %s artifact type does not exist in case database for %s", artifactTypeName, currentCase.getCaseDirectory()), ex);
}
BlackboardAttribute.Type attributeType;
try {
attributeType = caseDb.getAttributeType(attributeTypeName);
attributeType = caseDb.getBlackboard().getAttributeType(attributeTypeName);
} catch (TskCoreException ex) {
throw new ExportRulesException(String.format("The specified %s attribute type does not exist in case database for %s", attributeTypeName, currentCase.getCaseDirectory()), ex);
}
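The same relocation applies wherever artifact or attribute types are looked up; a minimal sketch under the API shape this hunk shows (the type names are placeholder examples):
Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
try {
    // Type lookups now live on Blackboard rather than on SleuthkitCase.
    BlackboardArtifact.Type artifactType = blackboard.getArtifactType("TSK_INTERESTING_FILE_HIT"); // placeholder
    BlackboardAttribute.Type attributeType = blackboard.getAttributeType("TSK_SET_NAME");          // placeholder
} catch (TskCoreException ex) {
    // The named type does not exist in the case database.
}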

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -177,7 +177,7 @@ public class ObjectDetectectionFileIngestModule extends FileIngestModuleAdapter
/*
* Index the artifact for keyword search.
*/
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
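This three-argument postArtifact(artifact, moduleName, ingestJobId) form recurs throughout the merge; a minimal sketch of the pattern (blackboard, MODULE_NAME, jobId, and logger are assumed to be in scope, as in a typical ingest module):
try {
    // Passing the ingest job id ties the artifact to the job that produced it;
    // callers outside an ingest job pass null, as the VolatilityProcessor hunk below does.
    blackboard.postArtifact(artifact, MODULE_NAME, jobId);
} catch (Blackboard.BlackboardException ex) {
    logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}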

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -390,7 +390,7 @@ class VolatilityProcessor {
try {
// index the artifact for keyword search
blackboard.postArtifact(volArtifact, VOLATILITY);
blackboard.postArtifact(volArtifact, VOLATILITY, null);
} catch (Blackboard.BlackboardException ex) {
errorMsgs.add(Bundle.VolatilityProcessor_errorMessage_failedToIndexArtifact(pluginName));
/*

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -134,7 +134,7 @@ class GPXParserFileIngestModule(FileIngestModule):
# Create a GeoArtifactsHelper for this file.
geoArtifactHelper = GeoArtifactsHelper(
self.skCase, self.moduleName, None, file)
self.skCase, self.moduleName, None, file, context.getJobId())
if self.writeDebugMsgs:
self.log(Level.INFO, "Processing " + file.getUniquePath() +
@ -213,7 +213,7 @@ class GPXParserFileIngestModule(FileIngestModule):
art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)
self.blackboard.postArtifact(art, self.moduleName)
self.blackboard.postArtifact(art, self.moduleName, context.getJobId())
except Blackboard.BlackboardException as e:
self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " +

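The geolocation helper gains the same trailing job id; a minimal Java sketch of the constructor shape shown in these Python hunks (per the surrounding calls, the null argument is the optional program name):
GeoArtifactsHelper geoHelper = new GeoArtifactsHelper(
        skCase, moduleName, null /* program name */, file, context.getJobId());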
View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2016-2018 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -104,9 +104,8 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
# NOTE: originally commented out
try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, general.MODULE_NAME)
blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc())

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2016-2018 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -102,9 +102,8 @@ class CacheLocationAnalyzer(general.AndroidComponentAnalyzer):
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy))
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence))
try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, general.MODULE_NAME)
blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc())

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2016-2020 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -83,12 +83,12 @@ class CallLogAnalyzer(general.AndroidComponentAnalyzer):
callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
callLogDb.getDBFile(),
Account.Type.PHONE, Account.Type.PHONE, selfAccountId )
Account.Type.PHONE, Account.Type.PHONE, selfAccountId, context.getJobId())
else:
callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
callLogDb.getDBFile(),
Account.Type.PHONE )
Account.Type.PHONE, context.getJobId())
for tableName in CallLogAnalyzer._tableNames:
try:

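The helper-constructor change repeated across these Python analyzers likewise appends the ingest job id as the final argument; a minimal Java sketch of the two overloads as they appear in the hunks (variable names taken from the surrounding code):
// With a known self account:
CommunicationArtifactsHelper helperWithSelf = new CommunicationArtifactsHelper(
        currentCase.getSleuthkitCase(), PARSER_NAME, callLogDb.getDBFile(),
        Account.Type.PHONE, Account.Type.PHONE, selfAccountId, context.getJobId());
// Without one:
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
        currentCase.getSleuthkitCase(), PARSER_NAME, callLogDb.getDBFile(),
        Account.Type.PHONE, context.getJobId());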
View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2016-2020 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -75,7 +75,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
return
for contactDb in contactsDbs:
try:
self.__findContactsInDB(contactDb, dataSource)
self.__findContactsInDB(contactDb, dataSource, context)
except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Contacts", ex)
self._logger.log(Level.SEVERE, traceback.format_exc())
@ -86,7 +86,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
"""
Queries the given contact database and adds Contacts to the case.
"""
def __findContactsInDB(self, contactDb, dataSource):
def __findContactsInDB(self, contactDb, dataSource, context):
if not contactDb:
return
@ -97,7 +97,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
contactDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
contactDb.getDBFile(),
Account.Type.PHONE )
Account.Type.PHONE, context.getJobId())
# get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
# sorted by name, so phonenumber/email would be consecutive for a person if they exist.
@ -158,7 +158,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
phoneNumber, # phoneNumber,
None, # homePhoneNumber,
None, # mobilePhoneNumber,
emailAddr) # emailAddr
emailAddr, context.getJobId()) # emailAddr
except SQLException as ex:
self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -148,11 +148,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer):
if self.selfAccountId is not None:
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, contactsDb.getDBFile(),
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId )
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId())
else:
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, contactsDb.getDBFile(),
Account.Type.FACEBOOK)
Account.Type.FACEBOOK, context.getJobId())
## get the other contacts/friends
contactsResultSet = contactsDb.runQuery("SELECT fbid, display_name, added_time_ms FROM contacts WHERE added_time_ms <> 0")
@ -492,11 +492,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer):
if self.selfAccountId is not None:
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, threadsDb.getDBFile(),
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId )
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId())
else:
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, threadsDb.getDBFile(),
Account.Type.FACEBOOK)
Account.Type.FACEBOOK, context.getJobId())
self.analyzeMessages(threadsDb, threadsDBHelper)
self.analyzeCallLogs(threadsDb, threadsDBHelper)

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2016-2018 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -76,7 +76,7 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
try:
jFile = File(self.current_case.getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName())
ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled)
self.__findGeoLocationsInDB(jFile.toString(), abstractFile)
self.__findGeoLocationsInDB(jFile.toString(), abstractFile, context)
except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
self._logger.log(Level.SEVERE, traceback.format_exc())
@ -84,13 +84,13 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
# Error finding Google map locations.
pass
def __findGeoLocationsInDB(self, databasePath, abstractFile):
def __findGeoLocationsInDB(self, databasePath, abstractFile, context):
if not databasePath:
return
try:
artifactHelper = GeoArtifactsHelper(self.current_case.getSleuthkitCase(),
general.MODULE_NAME, self.PROGRAM_NAME, abstractFile)
general.MODULE_NAME, self.PROGRAM_NAME, abstractFile, context.getJobId())
Class.forName("org.sqlite.JDBC") # load JDBC driver
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
statement = connection.createStatement()

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -109,12 +109,12 @@ class IMOAnalyzer(general.AndroidComponentAnalyzer):
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
friendsDb.getDBFile(),
Account.Type.IMO, Account.Type.IMO, selfAccountId )
Account.Type.IMO, Account.Type.IMO, selfAccountId, context.getJobId())
else:
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
friendsDb.getDBFile(),
Account.Type.IMO )
Account.Type.IMO, context.getJobId())
contactsResultSet = friendsDb.runQuery("SELECT buid, name FROM friends")
if contactsResultSet is not None:
while contactsResultSet.next():

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2019 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -75,7 +75,7 @@ class InstalledApplicationsAnalyzer(general.AndroidComponentAnalyzer):
try:
current_case = Case.getCurrentCaseThrows()
libraryDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, libraryDb.getDBFile())
self._MODULE_NAME, libraryDb.getDBFile(), context.getJobId())
queryString = "SELECT doc_id, purchase_time FROM ownership"
ownershipResultSet = libraryDb.runQuery(queryString)
if ownershipResultSet is not None:

View File

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License");
@ -117,7 +117,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
contact_and_message_db.getDBFile(), Account.Type.LINE)
contact_and_message_db.getDBFile(), Account.Type.LINE, context.getJobId())
self.parse_contacts(contact_and_message_db, helper)
self.parse_messages(contact_and_message_db, helper, current_case)
@ -125,7 +125,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
calllog_db.getDBFile(), Account.Type.LINE)
calllog_db.getDBFile(), Account.Type.LINE, context.getJobId())
self.parse_calllogs(dataSource, calllog_db, helper)
except NoCurrentCaseException as ex:

Some files were not shown because too many files have changed in this diff.