initial merge from develop

This commit is contained in:
Greg DiCristofaro 2023-06-09 13:20:05 -04:00
commit 9c03889ed2
27 changed files with 977 additions and 103 deletions

View File

@ -47,6 +47,9 @@ class AddLocalFilesTask implements Runnable {
private final List<String> localFilePaths;
private final DataSourceProcessorProgressMonitor progress;
private final DataSourceProcessorCallback callback;
private final boolean createTimestamp;
private final boolean accessTimestamp;
private final boolean modifiedTimestamp;
/**
* Constructs a runnable that adds a set of local/logical files and/or
@ -67,15 +70,22 @@ class AddLocalFilesTask implements Runnable {
* @param localFilePaths A list of localFilePaths of local/logical
* files and/or directories.
* @param host The host for this data source (may be null).
* @param createTime Boolean value to add the time the file was locally created
* @param accessTime Boolean value to add the time the file was last accessed
* @param modifiedTime Boolean value to add the time the file was locally modified
* @param progressMonitor Progress monitor to report progress
* during processing.
* @param callback Callback to call when processing is done.
*/
AddLocalFilesTask(String deviceId, String rootVirtualDirectoryName, List<String> localFilePaths, Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
AddLocalFilesTask(String deviceId, String rootVirtualDirectoryName, List<String> localFilePaths, Host host, boolean createTimestamp,
boolean accessTimestamp, boolean modifiedTimestamp, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
this.deviceId = deviceId;
this.rootVirtualDirectoryName = rootVirtualDirectoryName;
this.localFilePaths = localFilePaths;
this.host = host;
this.createTimestamp = createTimestamp;
this.accessTimestamp = accessTimestamp;
this.modifiedTimestamp = modifiedTimestamp;
this.callback = callback;
this.progress = progressMonitor;
}
@ -92,7 +102,8 @@ class AddLocalFilesTask implements Runnable {
try {
progress.setIndeterminate(true);
FileManager fileManager = Case.getCurrentCaseThrows().getServices().getFileManager();
LocalFilesDataSource newDataSource = fileManager.addLocalFilesDataSource(deviceId, rootVirtualDirectoryName, "", host, localFilePaths, new ProgressUpdater());
LocalFilesDataSource newDataSource = fileManager.addLocalFilesDataSource(deviceId, rootVirtualDirectoryName, "", host, localFilePaths, createTimestamp,
accessTimestamp, modifiedTimestamp, new ProgressUpdater());
newDataSources.add(newDataSource);
} catch (TskDataException | TskCoreException | NoCurrentCaseException ex) {
errors.add(ex.getMessage());

View File

@ -263,3 +263,8 @@ AddImageWizardSelectHostVisual.specifyNewHostRadio.text=Specify new host name
AddImageWizardSelectHostVisual.generateNewRadio.text=Generate new host name based on data source name
AddImageWizardSelectHostVisual.validationMessage.text=\
LocalFilesPanel.deleteButon.text=Delete
LocalFilesPanel.createTimeCheckBox.text=Creation Time - Often changed when a file is copied
LocalFilesPanel.modifiedTimeCheckBox.text=\ Modified Time - Often not changed when a file is copied
LocalFilesPanel.jLabel2.text=NOTE: Time stamps may have changed when the files were copied to the current location.
LocalFilesPanel.timestampToIncludeLabel.text=Timestamps To Include:
LocalFilesPanel.accessTimeCheckBox.text=Access Time - Can be changed when the file is opened

View File

@ -247,10 +247,15 @@ AddImageWizardIngestConfigPanel.dsProcDone.errs.text=*Errors encountered in addi
AddImageWizardIngestConfigVisual.getName.text=Configure Ingest
AddImageWizardIterator.stepXofN=Step {0} of {1}
AddLocalFilesTask.localFileAdd.progress.text=Adding: {0}/{1}
Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open!
Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open\!
Case.open.msgDlg.updated.msg=Updated case database schema.\nA backup copy of the database with the following path has been made:\n {0}
Case.open.msgDlg.updated.title=Case Database Schema Update
Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \nthis case are missing. Would you like to search for them now?\nPreviously, the image was located at:\n{0}\nPlease note that you will still be able to browse directories and generate reports\nif you choose No, but you will not be able to view file content or run the ingest process.
Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \n\
this case are missing. Would you like to search for them now?\n\
Previously, the image was located at:\n\
{0}\n\
Please note that you will still be able to browse directories and generate reports\n\
if you choose No, but you will not be able to view file content or run the ingest process.
Case.checkImgExist.confDlg.doesntExist.title=Missing Image
Case.addImg.exception.msg=Error adding image to the case
Case.updateCaseName.exception.msg=Error while trying to update the case name.
@ -269,9 +274,12 @@ Case.GetCaseTypeGivenPath.Failure=Unable to get case type
Case.metaDataFileCorrupt.exception.msg=The case metadata file (.aut) is corrupted.
Case.deleteReports.deleteFromDiskException.log.msg=Unable to delete the report from the disk.
Case.deleteReports.deleteFromDiskException.msg=Unable to delete the report {0} from the disk.\nYou may manually delete it from {1}
CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \nCase Name: {0}\nCase Directory: {1}
CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \n\
Case Name: {0}\n\
Case Directory: {1}
CaseDeleteAction.closeConfMsg.title=Warning: Closing the Current Case
CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\nClose the folder and file and try again or you can delete the case manually.
CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\n\
Close the folder and file and try again or you can delete the case manually.
CaseDeleteAction.msgDlg.fileInUse.title=Error: Folder In Use
CaseDeleteAction.msgDlg.caseDelete.msg=Case {0} has been deleted.
CaseOpenAction.autFilter.title={0} Case File ( {1})
@ -303,7 +311,8 @@ NewCaseWizardAction.databaseProblem1.text=Cannot open database. Cancelling case
NewCaseWizardAction.databaseProblem2.text=Error
NewCaseWizardPanel1.validate.errMsg.invalidSymbols=The Case Name cannot contain any of the following symbols: \\ / : * ? " &lt; > |
NewCaseWizardPanel1.validate.errMsg.dirExists=Case directory ''{0}'' already exists.
NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\nDo you want to create that directory?
NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\n\
Do you want to create that directory?
NewCaseWizardPanel1.validate.confMsg.createDir.title=Create directory
NewCaseWizardPanel1.validate.errMsg.cantCreateParDir.msg=Error: Could not create case parent directory {0}
NewCaseWizardPanel1.validate.errMsg.prevCreateBaseDir.msg=Prevented from creating base directory {0}
@ -360,8 +369,8 @@ UnpackageWorker.doInBackground.previouslySeenCase=Case has been previously opene
UpdateRecentCases.menuItem.clearRecentCases.text=Clear Recent Cases
UpdateRecentCases.menuItem.empty=-Empty-
AddImageWizardIngestConfigPanel.CANCEL_BUTTON.text=Cancel
NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on "C:" drive
NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on "C:" drive. Case folder is created on the target system
NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on \"C:\" drive
NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on \"C:\" drive. Case folder is created on the target system
NewCaseVisualPanel1.CaseFolderOnInternalDriveLinuxError.text=Warning: Path to case folder is on the target system. Create case folder in mounted drive.
NewCaseVisualPanel1.uncPath.error=Error: UNC paths are not allowed for Single-User cases
CollaborationMonitor.addingDataSourceStatus.msg={0} adding data source
@ -369,7 +378,7 @@ CollaborationMonitor.analyzingDataSourceStatus.msg={0} analyzing {1}
MissingImageDialog.lbWarning.text=
MissingImageDialog.lbWarning.toolTipText=
NewCaseVisualPanel1.caseParentDirWarningLabel.text=
NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-User\t\t
NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-User
NewCaseVisualPanel1.singleUserCaseRadioButton.text=Single-User
NewCaseVisualPanel1.caseTypeLabel.text=Case Type:
SingleUserCaseConverter.BadDatabaseFileName=Database file does not exist!
@ -482,3 +491,8 @@ AddImageWizardSelectHostVisual.specifyNewHostRadio.text=Specify new host name
AddImageWizardSelectHostVisual.generateNewRadio.text=Generate new host name based on data source name
AddImageWizardSelectHostVisual.validationMessage.text=\
LocalFilesPanel.deleteButon.text=Delete
LocalFilesPanel.createTimeCheckBox.text=Creation Time - Often changed when a file is copied
LocalFilesPanel.modifiedTimeCheckBox.text=\ Modified Time - Often not changed when a file is copied
LocalFilesPanel.jLabel2.text=NOTE: Time stamps may have changed when the files were copied to the current location.
LocalFilesPanel.timestampToIncludeLabel.text=Timestamps To Include:
LocalFilesPanel.accessTimeCheckBox.text=Access Time - Can be changed when the file is opened

View File

@ -176,6 +176,9 @@ public class LocalFilesDSProcessor implements DataSourceProcessor, AutoIngestDat
public void run(Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
localFilePaths = configPanel.getContentPaths();
boolean createTimestamp = configPanel.getCreateTimestamp();
boolean modifiedTimestamp = configPanel.getModifiedTimestamp();
boolean accessTimestamp = configPanel.getAccessTimestamp();
if (configPanel.subTypeIsLogicalEvidencePanel()) {
try {
//if the L01 option was chosen
@ -191,7 +194,8 @@ public class LocalFilesDSProcessor implements DataSourceProcessor, AutoIngestDat
return;
}
}
run(UUID.randomUUID().toString(), configPanel.getFileSetName(), localFilePaths, host, progressMonitor, callback);
run(UUID.randomUUID().toString(), configPanel.getFileSetName(), localFilePaths, host, createTimestamp,
accessTimestamp, modifiedTimestamp, progressMonitor, callback);
}
/**
@ -330,7 +334,40 @@ public class LocalFilesDSProcessor implements DataSourceProcessor, AutoIngestDat
* @param callback Callback to call when processing is done.
*/
public void run(String deviceId, String rootVirtualDirectoryName, List<String> localFilePaths, Host host, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
new Thread(new AddLocalFilesTask(deviceId, rootVirtualDirectoryName, localFilePaths, host, progressMonitor, callback)).start();
new Thread(new AddLocalFilesTask(deviceId, rootVirtualDirectoryName, localFilePaths, host, false, false, false,progressMonitor, callback)).start();
}
/**
* Adds a data source to the case database using a background task in a
* separate thread and the given settings instead of those provided by the
* selection and configuration panel. Returns as soon as the background task
* is started and uses the callback object to signal task completion and
* return results.
*
* @param deviceId An ASCII-printable identifier for the
* device associated with the data source
* that is intended to be unique across
* multiple cases (e.g., a UUID).
* @param rootVirtualDirectoryName The name to give to the virtual directory
* that will serve as the root for the
* local/logical files and/or directories
* that compose the data source. Pass the
* empty string to get a default name of the
* form: LogicalFileSet[N]
* @param localFilePaths A list of local/logical file and/or
* directory localFilePaths.
* @param createTime Boolean value to add the time the file was locally created
* @param accessTime Boolean value to add the time the file was last accessed
* @param modifiedTime Boolean value to add the time the file was locally modified
* @param host The host for this data source.
* @param progressMonitor Progress monitor for reporting progress
* during processing.
* @param callback Callback to call when processing is done.
*/
public void run(String deviceId, String rootVirtualDirectoryName, List<String> localFilePaths, Host host, boolean createTimestamp, boolean accessTimestamp,
boolean modifiedTimestamp, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
new Thread(new AddLocalFilesTask(deviceId, rootVirtualDirectoryName, localFilePaths, host, createTimestamp, accessTimestamp, modifiedTimestamp,
progressMonitor, callback)).start();
}
/**
@ -356,7 +393,7 @@ public class LocalFilesDSProcessor implements DataSourceProcessor, AutoIngestDat
* during processing.
* @param callback Callback to call when processing is done.
*/
public void run(String deviceId, String rootVirtualDirectoryName, List<String> localFilePaths, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
public void run(String deviceId, String rootVirtualDirectoryName, List<String> localFilePaths, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
run(deviceId, rootVirtualDirectoryName, localFilePaths, null, progressMonitor, callback);
}

View File

@ -331,6 +331,33 @@ final class LocalFilesPanel extends javax.swing.JPanel {
.map(File::getAbsolutePath)
.collect(Collectors.toList());
}
/**
* Get whether the createTimestampcheckbox has been checked or not
* @return boolean if box was checked
*/
Boolean getCreateTimestamps() {
return createTimeCheckBox.isSelected();
}
/**
* Get whether the ModifiedTimestampcheckbox has been checked or not
* @return boolean if box was checked
*/
Boolean getModifiedTimestamps() {
return modifiedTimeCheckBox.isSelected();
}
/**
* Get whether the accessTimestampcheckbox has been checked or not
* @return boolean if box was checked
*/
Boolean getAccessTimestamps() {
return accessTimeCheckBox.isSelected();
}
/**
* Validates path to selected data source and displays warning if it is

View File

@ -16,16 +16,11 @@
<Layout>
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" attributes="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace min="-2" max="-2" attributes="0"/>
<Component id="dspSubtypeComboBox" min="-2" max="-2" attributes="0"/>
</Group>
<Component id="dspSubtypePanel" alignment="0" min="-2" pref="466" max="-2" attributes="0"/>
</Group>
<EmptySpace max="32767" attributes="0"/>
<Group type="102" alignment="0" attributes="0">
<EmptySpace min="-2" max="-2" attributes="0"/>
<Component id="dspSubtypeComboBox" min="-2" max="-2" attributes="0"/>
</Group>
<Component id="dspSubtypePanel" alignment="0" min="-2" pref="524" max="-2" attributes="0"/>
</Group>
</DimensionLayout>
<DimensionLayout dim="1">
@ -33,8 +28,8 @@
<Group type="102" alignment="0" attributes="0">
<Component id="dspSubtypeComboBox" min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="dspSubtypePanel" min="-2" max="-2" attributes="0"/>
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
<Component id="dspSubtypePanel" min="-2" pref="334" max="-2" attributes="0"/>
<EmptySpace max="32767" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
@ -50,12 +45,12 @@
<Layout>
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<EmptySpace min="0" pref="466" max="32767" attributes="0"/>
<EmptySpace min="0" pref="524" max="32767" attributes="0"/>
</Group>
</DimensionLayout>
<DimensionLayout dim="1">
<Group type="103" groupAlignment="0" attributes="0">
<EmptySpace min="0" pref="160" max="32767" attributes="0"/>
<EmptySpace min="0" pref="334" max="32767" attributes="0"/>
</Group>
</DimensionLayout>
</Layout>

View File

@ -114,11 +114,11 @@ final class LogicalFilesDspPanel extends JPanel {
dspSubtypePanel.setLayout(dspSubtypePanelLayout);
dspSubtypePanelLayout.setHorizontalGroup(
dspSubtypePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 466, Short.MAX_VALUE)
.addGap(0, 524, Short.MAX_VALUE)
);
dspSubtypePanelLayout.setVerticalGroup(
dspSubtypePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 160, Short.MAX_VALUE)
.addGap(0, 334, Short.MAX_VALUE)
);
dspSubtypeComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] {Bundle.LogicalFilesDspPanel_subTypeComboBox_localFilesOption_text(), Bundle.LogicalFilesDspPanel_subTypeComboBox_l01FileOption_text()}));
@ -135,20 +135,17 @@ final class LogicalFilesDspPanel extends JPanel {
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(dspSubtypeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(dspSubtypePanel, javax.swing.GroupLayout.PREFERRED_SIZE, 466, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap()
.addComponent(dspSubtypeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(dspSubtypePanel, javax.swing.GroupLayout.PREFERRED_SIZE, 524, javax.swing.GroupLayout.PREFERRED_SIZE)
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(dspSubtypeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(dspSubtypePanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, 0))
.addComponent(dspSubtypePanel, javax.swing.GroupLayout.PREFERRED_SIZE, 334, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
}// </editor-fold>//GEN-END:initComponents
@ -231,4 +228,32 @@ final class LogicalFilesDspPanel extends JPanel {
return "";
}
}
/**
* Get whether the createTimestampcheckbox was selected or not
*
* @return if box was checked or not
*/
Boolean getCreateTimestamp() {
return localFilesPanel.getCreateTimestamps();
}
/**
* Get whether the modifiedTimestampcheckbox was selected or not
*
* @return if box was checked or not
*/
Boolean getModifiedTimestamp() {
return localFilesPanel.getModifiedTimestamps();
}
/**
* Get whether the accessTimestampcheckbox was selected or not
*
* @return if box was checked or not
*/
Boolean getAccessTimestamp() {
return localFilesPanel.getAccessTimestamps();
}
}

View File

@ -25,6 +25,8 @@ package org.sleuthkit.autopsy.casemodule.services;
import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@ -424,9 +426,8 @@ public class FileManager implements Closeable {
* directory that does not exist or cannot be read.
*/
public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootVirtualDirectoryName, String timeZone, List<String> localFilePaths, FileAddProgressUpdater progressUpdater) throws TskCoreException, TskDataException {
return addLocalFilesDataSource(deviceId, rootVirtualDirectoryName, timeZone, null, localFilePaths, progressUpdater);
return addLocalFilesDataSource(deviceId, rootVirtualDirectoryName, timeZone, null, localFilePaths, false, false, false, progressUpdater);
}
/**
* Adds a set of local/logical files and/or directories to the case database
* as data source.
@ -456,8 +457,44 @@ public class FileManager implements Closeable {
* @throws TskDataException if any of the local file paths is for a file or
* directory that does not exist or cannot be read.
*/
public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootVirtualDirectoryName, String timeZone, Host host, List<String> localFilePaths, FileAddProgressUpdater progressUpdater) throws TskCoreException, TskDataException {
return addLocalFilesDataSource(deviceId, rootVirtualDirectoryName, timeZone, host, localFilePaths, false, false, false, progressUpdater);
}
/**
* Adds a set of local/logical files and/or directories to the case database
* as data source.
*
* @param deviceId An ASCII-printable identifier for the
* device associated with the data source
* that is intended to be unique across
* multiple cases (e.g., a UUID).
* @param rootVirtualDirectoryName The name to give to the virtual directory
* that will serve as the root for the
* local/logical files and/or directories
* that compose the data source. Pass the
* empty string to get a default name of the
* form: LogicalFileSet[N]
* @param timeZone The time zone used to process the data
* source, may be the empty string.
* @param host The host for this data source (may be null).
* @param localFilePaths A list of local/logical file and/or
* directory localFilePaths.
* @param createTime Boolean value to add the time the file was locally created
* @param accessTime Boolean value to add the time the file was last accessed
* @param modifiedTime Boolean value to add the time the file was locally modified
* @param progressUpdater Called after each file/directory is added
* to the case database.
*
* @return A local files data source object.
*
* @throws TskCoreException If there is a problem completing a database
* operation.
* @throws TskDataException if any of the local file paths is for a file or
* directory that does not exist or cannot be read.
*/
public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootVirtualDirectoryName, String timeZone, Host host,
List<String> localFilePaths, FileAddProgressUpdater progressUpdater) throws TskCoreException, TskDataException {
List<String> localFilePaths, boolean createTimestamp, boolean accessTimestamp, boolean modifiedTimestamp, FileAddProgressUpdater progressUpdater) throws TskCoreException, TskDataException {
List<java.io.File> localFiles = getFilesAndDirectories(localFilePaths);
CaseDbTransaction trans = null;
try {
@ -474,7 +511,7 @@ public class FileManager implements Closeable {
LocalFilesDataSource dataSource = caseDb.addLocalFilesDataSource(deviceId, rootDirectoryName, timeZone, host, trans);
List<AbstractFile> filesAdded = new ArrayList<>();
for (java.io.File localFile : localFiles) {
AbstractFile fileAdded = addLocalFile(trans, dataSource, localFile, TskData.EncodingType.NONE, progressUpdater);
AbstractFile fileAdded = addLocalFile(trans, dataSource, localFile, createTimestamp, accessTimestamp, modifiedTimestamp, TskData.EncodingType.NONE, progressUpdater);
if (null != fileAdded) {
filesAdded.add(fileAdded);
} else {
@ -564,6 +601,9 @@ public class FileManager implements Closeable {
* @param parentDirectory The root virtual directory of the data source or
* the parent local directory.
* @param localFile The local/logical file or directory.
* @param createTime Boolean value to add the time the file was locally created
* @param accessTime Boolean value to add the time the file was locally modified
* @param modifiedTime Boolean value to add the time the file was last accessed
* @param encodingType Type of encoding used when storing the file
* @param progressUpdater Called after each file/directory is added to the
* case database.
@ -573,8 +613,8 @@ public class FileManager implements Closeable {
* @throws TskCoreException If there is a problem completing a database
* operation.
*/
private AbstractFile addLocalFile(CaseDbTransaction trans, SpecialDirectory parentDirectory, java.io.File localFile,
TskData.EncodingType encodingType, FileAddProgressUpdater progressUpdater) throws TskCoreException {
private AbstractFile addLocalFile(CaseDbTransaction trans, SpecialDirectory parentDirectory, java.io.File localFile, boolean createTime,
boolean accessTime, boolean modifiedTime, TskData.EncodingType encodingType, FileAddProgressUpdater progressUpdater) throws TskCoreException {
if (localFile.isDirectory()) {
/*
* Add the directory as a local directory.
@ -588,15 +628,35 @@ public class FileManager implements Closeable {
final java.io.File[] childFiles = localFile.listFiles();
if (childFiles != null && childFiles.length > 0) {
for (java.io.File childFile : childFiles) {
addLocalFile(trans, localDirectory, childFile, progressUpdater);
addLocalFile(trans, localDirectory, childFile, createTime, accessTime, modifiedTime, encodingType, progressUpdater);
}
}
return localDirectory;
} else {
return caseDb.addLocalFile(localFile.getName(), localFile.getAbsolutePath(), localFile.length(),
0, 0, 0, 0,
long createTimestamp = 0;
long modifiedTimestamp = 0;
long accessTimestamp = 0;
try {
BasicFileAttributes attrs;
attrs = Files.readAttributes(localFile.toPath(), BasicFileAttributes.class);
if (createTime) {
createTimestamp = (attrs.creationTime().toMillis()/1000);
}
if (modifiedTime) {
modifiedTimestamp = (attrs.lastModifiedTime().toMillis()/1000);
}
if (accessTime) {
accessTimestamp = (attrs.lastAccessTime().toMillis()/1000);
}
return caseDb.addLocalFile(localFile.getName(), localFile.getAbsolutePath(), localFile.length(),
0, createTimestamp, accessTimestamp, modifiedTimestamp,
localFile.isFile(), encodingType, parentDirectory, trans);
} catch (IOException ex) {
return caseDb.addLocalFile(localFile.getName(), localFile.getAbsolutePath(), localFile.length(),
0, 0, 0, 0,
localFile.isFile(), encodingType, parentDirectory, trans);
}
}
}
@ -752,7 +812,7 @@ public class FileManager implements Closeable {
*/
@Deprecated
private AbstractFile addLocalFile(CaseDbTransaction trans, SpecialDirectory parentDirectory, java.io.File localFile, FileAddProgressUpdater progressUpdater) throws TskCoreException {
return addLocalFile(trans, parentDirectory, localFile, TskData.EncodingType.NONE, progressUpdater);
return addLocalFile(trans, parentDirectory, localFile, false, false, false, TskData.EncodingType.NONE, progressUpdater);
}
/**

View File

@ -292,7 +292,16 @@ public class JLNK {
} else if (linkTargetIdList != null && !linkTargetIdList.isEmpty()) {
String ret = "";
for (String s : linkTargetIdList) {
ret += s;
if (s.endsWith("\\")) {
ret += s;
} else {
if (ret.endsWith("\\")) {
ret +=s;
} else {
ret += "\\";
ret += s;
}
}
}
return ret;
}

View File

@ -48,6 +48,10 @@ public interface AutopsyItemVisitor<T> {
T visit(DeletedContent dc);
T visit(DeletedContent.DeletedContentFilter dcf);
T visit(ScoreContent sc);
T visit(ScoreContent.ScoreContentFilter scf);
T visit(FileSize fs);
@ -124,6 +128,16 @@ public interface AutopsyItemVisitor<T> {
public T visit(DeletedContent.DeletedContentFilter dcf) {
return defaultVisit(dcf);
}
@Override
public T visit(ScoreContent dc) {
return defaultVisit(dc);
}
@Override
public T visit(ScoreContent.ScoreContentFilter dcf) {
return defaultVisit(dcf);
}
@Override
public T visit(FileSize fs) {

View File

@ -412,6 +412,13 @@ ReportNode.reportNameProperty.name=Report Name
ReportNode.reportNameProperty.displayName=Report Name
ReportNode.reportNameProperty.desc=Name of the report
ReportsListNode.displayName=Reports
ScoreContent_badFilter_text=Bad Items
ScoreContent_createSheet_filterType_desc=no description
ScoreContent_createSheet_filterType_displayName=Type
ScoreContent_createSheet_name_desc=no description
ScoreContent_createSheet_name_displayName=Name
ScoreContent_ScoreContentNode_name=Score
ScoreContent_susFilter_text=Suspicious Items
SlackFileNode.getActions.viewInNewWin.text=View in New Window
SlackFileNode.getActions.viewFileInDir.text=View File in Directory
SpecialDirectoryNode.getActions.viewInNewWin.text=View in New Window

View File

@ -32,6 +32,8 @@ import org.sleuthkit.autopsy.datamodel.FileSize.FileSizeRootNode;
import org.sleuthkit.autopsy.datamodel.FileTypes.FileTypesNode;
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.autopsy.allcasessearch.CorrelationAttributeInstanceNode;
import org.sleuthkit.autopsy.datamodel.ScoreContent.ScoreContentsNode;
import org.sleuthkit.autopsy.datamodel.ScoreContent.ScoreContentsChildren.ScoreContentNode;
/**
* Visitor pattern that goes over all nodes in the directory tree. This includes
@ -78,6 +80,10 @@ public interface DisplayableItemNodeVisitor<T> {
T visit(DeletedContentsNode dcn);
T visit(ScoreContentNode scn);
T visit(ScoreContentsNode scn);
T visit(FileSizeRootNode fsrn);
T visit(FileSizeNode fsn);
@ -335,6 +341,16 @@ public interface DisplayableItemNodeVisitor<T> {
return defaultVisit(dcn);
}
@Override
public T visit(ScoreContentNode scn) {
return defaultVisit(scn);
}
@Override
public T visit(ScoreContentsNode scn) {
return defaultVisit(scn);
}
@Override
public T visit(DeletedContentsNode dcn) {
return defaultVisit(dcn);

View File

@ -24,6 +24,8 @@ import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.datamodel.ScoreContent.ScoreContentsChildren;
import org.sleuthkit.autopsy.datamodel.ScoreContent.ScoreContentsChildren.ScoreContentNode;
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.datamodel.SleuthkitVisitableItem;
@ -98,6 +100,11 @@ public class RootContentChildren extends Children.Keys<Object> {
return new DeletedContent.DeletedContentsNode(dc.getSleuthkitCase(), dc.filteringDataSourceObjId());
}
@Override
public AbstractNode visit(ScoreContent sc) {
return new ScoreContent.ScoreContentsNode(sc.getSleuthkitCase(), sc.filteringDataSourceObjId());
}
@Override
public AbstractNode visit(FileSize dc) {
return new FileSize.FileSizeRootNode(dc.getSleuthkitCase(), dc.filteringDataSourceObjId());

View File

@ -0,0 +1,596 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2023 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.WeakListeners;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentVisitor;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.LocalFile;
import org.sleuthkit.datamodel.Score.Priority;
import org.sleuthkit.datamodel.Score.Significance;
import org.sleuthkit.datamodel.SlackFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.VirtualDirectory;
/**
* Score content view nodes.
*/
/**
 * Score content view nodes: displays files whose aggregate score marks them as
 * "Bad Items" (notable) or "Suspicious Items" (likely notable).
 */
public class ScoreContent implements AutopsyVisitableItem {

    private SleuthkitCase skCase;
    private final long filteringDSObjId; // 0 if not filtering/grouping by data source

    @NbBundle.Messages({"ScoreContent_badFilter_text=Bad Items",
        "ScoreContent_susFilter_text=Suspicious Items"})
    public enum ScoreContentFilter implements AutopsyVisitableItem {

        BAD_ITEM_FILTER(0, "BAD_ITEM_FILTER",
                Bundle.ScoreContent_badFilter_text()),
        SUS_ITEM_FILTER(1, "SUS_ITEM_FILTER",
                Bundle.ScoreContent_susFilter_text());

        private int id;           // stable numeric identifier for the filter
        private String name;      // programmatic name (used as node name / child factory key)
        private String displayName; // localized display name

        private ScoreContentFilter(int id, String name, String displayName) {
            this.id = id;
            this.name = name;
            this.displayName = displayName;
        }

        /**
         * @return The programmatic (non-localized) name of this filter.
         */
        public String getName() {
            return this.name;
        }

        /**
         * @return The numeric id of this filter.
         */
        public int getId() {
            return this.id;
        }

        /**
         * @return The localized display name of this filter.
         */
        public String getDisplayName() {
            return this.displayName;
        }

        @Override
        public <T> T accept(AutopsyItemVisitor<T> visitor) {
            return visitor.visit(this);
        }
    }

    /**
     * Constructor assuming no data source filtering.
     *
     * @param skCase The sleuthkit case.
     */
    public ScoreContent(SleuthkitCase skCase) {
        this(skCase, 0);
    }

    /**
     * Constructor.
     *
     * @param skCase  The sleuthkit case.
     * @param dsObjId The data source object id to filter on if > 0.
     */
    public ScoreContent(SleuthkitCase skCase, long dsObjId) {
        this.skCase = skCase;
        this.filteringDSObjId = dsObjId;
    }

    /**
     * @return The data source object id to filter on if > 0.
     */
    long filteringDataSourceObjId() {
        return this.filteringDSObjId;
    }

    @Override
    public <T> T accept(AutopsyItemVisitor<T> visitor) {
        return visitor.visit(this);
    }

    /**
     * @return The sleuthkit case used.
     */
    public SleuthkitCase getSleuthkitCase() {
        return this.skCase;
    }

    /**
     * Case events that can change the aggregate score of files and therefore
     * require a refresh of these nodes.
     */
    private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(
            Case.Events.DATA_SOURCE_ADDED,
            Case.Events.CURRENT_CASE,
            Case.Events.CONTENT_TAG_ADDED,
            Case.Events.CONTENT_TAG_DELETED,
            Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED,
            Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED
    );

    /**
     * Ingest job events that require a refresh of these nodes.
     */
    private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);

    /**
     * Ingest module events that require a refresh of these nodes.
     */
    private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestModuleEvent.CONTENT_CHANGED);

    /**
     * Returns a property change listener listening for possible updates to aggregate score updates for files.
     *
     * @param onRefresh Action on refresh.
     * @param onRemove Action to remove listener (i.e. case close).
     * @return The property change listener.
     */
    private static PropertyChangeListener getPcl(final Runnable onRefresh, final Runnable onRemove) {
        return (PropertyChangeEvent evt) -> {
            String eventType = evt.getPropertyName();
            if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
                // only refresh if there is a current case.
                try {
                    Case.getCurrentCaseThrows();
                    if (onRefresh != null) {
                        onRefresh.run();
                    }
                } catch (NoCurrentCaseException notUsed) {
                    /**
                     * Case is closed, do nothing.
                     */
                }
            } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
                // case was closed. Remove listeners so that we don't get called with a stale case handle
                if (evt.getNewValue() == null && onRemove != null) {
                    onRemove.run();
                }
            // BUGFIX: CASE_EVENTS_OF_INTEREST is a Set<Case.Events>, so
            // CASE_EVENTS_OF_INTEREST.contains(eventType) with a String argument
            // always returned false and case events never triggered a refresh.
            // Compare the event names instead.
            } else if (CASE_EVENTS_OF_INTEREST.stream().anyMatch(caseEvent -> caseEvent.toString().equals(eventType))) {
                // only refresh if there is a current case.
                try {
                    Case.getCurrentCaseThrows();
                    if (onRefresh != null) {
                        onRefresh.run();
                    }
                } catch (NoCurrentCaseException notUsed) {
                    /**
                     * Case is closed, do nothing.
                     */
                }
            }
        };
    }

    /**
     * The sql where statement for the files.
     *
     * @param filter           The filter type.
     * @param filteringDSObjId The data source object id to filter on if > 0.
     *
     * @return The sql where statement.
     *
     * @throws IllegalArgumentException If the filter type is not supported.
     */
    static private String getFileFilter(ScoreContent.ScoreContentFilter filter, long filteringDSObjId) throws IllegalArgumentException {
        String aggregateScoreFilter = "";
        switch (filter) {
            case SUS_ITEM_FILTER:
                aggregateScoreFilter = " tsk_aggregate_score.significance = " + Significance.LIKELY_NOTABLE.getId() + " AND (tsk_aggregate_score.priority = " + Priority.NORMAL.getId() + " OR tsk_aggregate_score.priority = " + Priority.OVERRIDE.getId() + " )";
                break;
            case BAD_ITEM_FILTER:
                aggregateScoreFilter = " tsk_aggregate_score.significance = " + Significance.NOTABLE.getId() + " AND (tsk_aggregate_score.priority = " + Priority.NORMAL.getId() + " OR tsk_aggregate_score.priority = " + Priority.OVERRIDE.getId() + " )";
                break;
            default:
                throw new IllegalArgumentException(MessageFormat.format("Unsupported filter type to get suspect content: {0}", filter));
        }

        String query = " obj_id IN (SELECT tsk_aggregate_score.obj_id FROM tsk_aggregate_score WHERE " + aggregateScoreFilter + ") ";

        if (filteringDSObjId > 0) {
            query += " AND data_source_obj_id = " + filteringDSObjId;
        }
        return query;
    }

    /**
     * Checks for analysis results added to the case that could affect the
     * aggregate score of the file.
     *
     * @param evt The event.
     * @return True if has an analysis result.
     */
    private static boolean isRefreshRequired(PropertyChangeEvent evt) {
        String eventType = evt.getPropertyName();
        if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
            // check if current case is active before updating
            try {
                Case.getCurrentCaseThrows();
                final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
                if (null != event && Category.ANALYSIS_RESULT.equals(event.getBlackboardArtifactType().getCategory())) {
                    return true;
                }
            } catch (NoCurrentCaseException notUsed) {
                /**
                 * Case is closed, do nothing.
                 */
            }
        }
        return false;
    }

    /**
     * Parent node in views section for content with score.
     */
    public static class ScoreContentsNode extends DisplayableItemNode {

        @NbBundle.Messages("ScoreContent_ScoreContentNode_name=Score")
        private static final String NAME = Bundle.ScoreContent_ScoreContentNode_name();

        ScoreContentsNode(SleuthkitCase skCase, long datasourceObjId) {
            super(Children.create(new ScoreContentsChildren(skCase, datasourceObjId), true), Lookups.singleton(NAME));
            super.setName(NAME);
            super.setDisplayName(NAME);
            this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/red-circle-exclamation.png"); //NON-NLS
        }

        @Override
        public boolean isLeafTypeNode() {
            return false;
        }

        @Override
        public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
            return visitor.visit(this);
        }

        @Override
        @NbBundle.Messages({
            "ScoreContent_createSheet_name_displayName=Name",
            "ScoreContent_createSheet_name_desc=no description"})
        protected Sheet createSheet() {
            Sheet sheet = super.createSheet();
            Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
            if (sheetSet == null) {
                sheetSet = Sheet.createPropertiesSet();
                sheet.put(sheetSet);
            }

            sheetSet.put(new NodeProperty<>("Name", //NON-NLS
                    Bundle.ScoreContent_createSheet_name_displayName(),
                    Bundle.ScoreContent_createSheet_name_desc(),
                    NAME));
            return sheet;
        }

        @Override
        public String getItemType() {
            return getClass().getName();
        }
    }

    /**
     * Children that display a node for Bad Items and Score Items.
     */
    public static class ScoreContentsChildren extends ChildFactory.Detachable<ScoreContent.ScoreContentFilter> implements RefreshThrottler.Refresher {

        private SleuthkitCase skCase;
        private final long datasourceObjId;

        private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);

        private final PropertyChangeListener pcl = getPcl(
                () -> ScoreContentsChildren.this.refresh(false),
                () -> ScoreContentsChildren.this.removeNotify());

        // weak listener so that this factory can be garbage collected even while registered
        private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);

        // keeps track of created filter nodes so their display names (counts) can be updated
        private final Map<ScoreContentFilter, ScoreContentsChildren.ScoreContentNode> typeNodeMap = new HashMap<>();

        public ScoreContentsChildren(SleuthkitCase skCase, long dsObjId) {
            this.skCase = skCase;
            this.datasourceObjId = dsObjId;
        }

        @Override
        protected void addNotify() {
            super.addNotify();
            refreshThrottler.registerForIngestModuleEvents();
            IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
            IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
            Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, weakPcl);
        }

        @Override
        protected void removeNotify() {
            refreshThrottler.unregisterEventListener();
            IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
            IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
            Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, weakPcl);
            typeNodeMap.clear();
        }

        @Override
        public void refresh() {
            refresh(false);
        }

        @Override
        public boolean isRefreshRequired(PropertyChangeEvent evt) {
            return ScoreContent.isRefreshRequired(evt);
        }

        @Override
        protected boolean createKeys(List<ScoreContent.ScoreContentFilter> list) {
            list.addAll(Arrays.asList(ScoreContent.ScoreContentFilter.values()));
            // refresh the child counts shown in the display names
            typeNodeMap.values().forEach(nd -> nd.updateDisplayName());
            return true;
        }

        @Override
        protected Node createNodeForKey(ScoreContent.ScoreContentFilter key) {
            ScoreContentsChildren.ScoreContentNode nd = new ScoreContentsChildren.ScoreContentNode(skCase, key, datasourceObjId);
            typeNodeMap.put(key, nd);
            return nd;
        }

        /**
         * Parent node showing files matching a score filter.
         */
        public class ScoreContentNode extends DisplayableItemNode {

            private static final Logger logger = Logger.getLogger(ScoreContentNode.class.getName());
            private final ScoreContent.ScoreContentFilter filter;
            private final long datasourceObjId;

            ScoreContentNode(SleuthkitCase skCase, ScoreContent.ScoreContentFilter filter, long dsObjId) {
                super(Children.create(new ScoreContentChildren(filter, skCase, dsObjId), true), Lookups.singleton(filter.getDisplayName()));
                this.filter = filter;
                this.datasourceObjId = dsObjId;
                init();
            }

            private void init() {
                super.setName(filter.getName());

                String tooltip = filter.getDisplayName();
                this.setShortDescription(tooltip);
                switch (this.filter) {
                    case SUS_ITEM_FILTER:
                        this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/yellow-circle-yield.png"); //NON-NLS
                        break;
                    default:
                    case BAD_ITEM_FILTER:
                        this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/red-circle-exclamation.png"); //NON-NLS
                        break;
                }
                updateDisplayName();
            }

            void updateDisplayName() {
                //get count of children without preloading all child nodes
                long count = 0;
                try {
                    count = calculateItems(skCase, filter, datasourceObjId);
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "An error occurred while fetching file counts", ex);
                }

                super.setDisplayName(filter.getDisplayName() + " (" + count + ")");
            }

            /**
             * Get children count without actually loading all nodes
             *
             * @param sleuthkitCase    The case database.
             * @param filter           The score filter.
             * @param datasourceObjId  The data source object id to filter on if > 0.
             *
             * @return The number of files matching the filter.
             *
             * @throws TskCoreException If there is an error querying the case database.
             */
            private static long calculateItems(SleuthkitCase sleuthkitCase, ScoreContent.ScoreContentFilter filter, long datasourceObjId) throws TskCoreException {
                return sleuthkitCase.countFilesWhere(getFileFilter(filter, datasourceObjId));
            }

            @Override
            public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
                return visitor.visit(this);
            }

            @Override
            @NbBundle.Messages({
                "ScoreContent_createSheet_filterType_displayName=Type",
                "ScoreContent_createSheet_filterType_desc=no description"})
            protected Sheet createSheet() {
                Sheet sheet = super.createSheet();
                Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
                if (sheetSet == null) {
                    sheetSet = Sheet.createPropertiesSet();
                    sheet.put(sheetSet);
                }

                sheetSet.put(new NodeProperty<>("Type", //NON_NLS
                        Bundle.ScoreContent_createSheet_filterType_displayName(),
                        Bundle.ScoreContent_createSheet_filterType_desc(),
                        filter.getDisplayName()));

                return sheet;
            }

            @Override
            public boolean isLeafTypeNode() {
                return true;
            }

            @Override
            public String getItemType() {
                return DisplayableItemNode.FILE_PARENT_NODE_KEY;
            }
        }

        /**
         * Children showing files for a score filter.
         */
        static class ScoreContentChildren extends BaseChildFactory<AbstractFile> implements RefreshThrottler.Refresher {

            private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);
            private final PropertyChangeListener pcl = getPcl(
                    () -> ScoreContentChildren.this.refresh(false),
                    () -> ScoreContentChildren.this.removeNotify());

            // weak listener so that this factory can be garbage collected even while registered
            private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);

            private final SleuthkitCase skCase;
            private final ScoreContent.ScoreContentFilter filter;
            private static final Logger logger = Logger.getLogger(ScoreContentChildren.class.getName());
            private final long datasourceObjId;

            ScoreContentChildren(ScoreContent.ScoreContentFilter filter, SleuthkitCase skCase, long datasourceObjId) {
                super(filter.getName(), new ViewsKnownAndSlackFilter<>());
                this.skCase = skCase;
                this.filter = filter;
                this.datasourceObjId = datasourceObjId;
            }

            @Override
            protected void onAdd() {
                refreshThrottler.registerForIngestModuleEvents();
                IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
                IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
                Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, weakPcl);
            }

            @Override
            protected void onRemove() {
                refreshThrottler.unregisterEventListener();
                IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
                IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
                Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, weakPcl);
            }

            @Override
            public void refresh() {
                refresh(false);
            }

            @Override
            public boolean isRefreshRequired(PropertyChangeEvent evt) {
                return ScoreContent.isRefreshRequired(evt);
            }

            /**
             * Fetches the files matching this child factory's score filter from
             * the case database.
             *
             * @return The matching files, or an empty list on error.
             */
            private List<AbstractFile> runFsQuery() {
                List<AbstractFile> ret = new ArrayList<>();

                String query = null;
                try {
                    query = getFileFilter(filter, datasourceObjId);
                    ret = skCase.findAllFilesWhere(query);
                } catch (TskCoreException | IllegalArgumentException e) {
                    // BUGFIX: message previously said "deleted content view" (copy-paste from DeletedContent)
                    logger.log(Level.SEVERE, "Error getting files for the score content view using: " + StringUtils.defaultString(query, "<null>"), e); //NON-NLS
                }

                return ret;
            }

            @Override
            protected List<AbstractFile> makeKeys() {
                return runFsQuery();
            }

            @Override
            protected Node createNodeForKey(AbstractFile key) {
                return key.accept(new ContentVisitor.Default<AbstractNode>() {
                    public FileNode visit(AbstractFile f) {
                        return new FileNode(f, false);
                    }

                    public FileNode visit(FsContent f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public FileNode visit(LayoutFile f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public FileNode visit(File f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public FileNode visit(Directory f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public FileNode visit(VirtualDirectory f) {
                        return new FileNode(f, false);
                    }

                    @Override
                    public AbstractNode visit(SlackFile sf) {
                        return new FileNode(sf, false);
                    }

                    @Override
                    public AbstractNode visit(LocalFile lf) {
                        return new FileNode(lf, false);
                    }

                    @Override
                    public AbstractNode visit(DerivedFile df) {
                        return new FileNode(df, false);
                    }

                    @Override
                    protected AbstractNode defaultVisit(Content di) {
                        if (di instanceof AbstractFile) {
                            return visit((AbstractFile) di);
                        } else {
                            throw new UnsupportedOperationException("Not supported for this type of Displayable Item: " + di.toString());
                        }
                    }
                });
            }
        }
    }
}

View File

@ -46,7 +46,8 @@ public class ViewsNode extends DisplayableItemNode {
// add it back in if we can filter the results to a more manageable size.
// new RecentFiles(sleuthkitCase),
new DeletedContent(sleuthkitCase, dsObjId),
new FileSize(sleuthkitCase, dsObjId))
new FileSize(sleuthkitCase, dsObjId),
new ScoreContent(sleuthkitCase, dsObjId))
),
Lookups.singleton(NAME)
);

View File

@ -18,20 +18,24 @@
*/
package org.sleuthkit.autopsy.directorytree;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SequenceWriter;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.swing.AbstractAction;
@ -57,7 +61,8 @@ import org.sleuthkit.autopsy.guiutils.JFileChooserFactory;
* Exports CSV version of result nodes to a location selected by the user.
*/
public final class ExportCSVAction extends AbstractAction {
// number of rows to sample for different columns
private static final int COLUMN_SAMPLING_ROW_NUM = 100;
private static final Logger logger = Logger.getLogger(ExportCSVAction.class.getName());
private final static String DEFAULT_FILENAME = "Results";
private final static List<String> columnsToSkip = Arrays.asList(AbstractFilePropertyType.SCORE.toString(),
@ -276,43 +281,64 @@ public final class ExportCSVAction extends AbstractAction {
progress.start();
progress.switchToIndeterminate();
try (BufferedWriter br = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8))) {
// Write BOM
br.write('\ufeff');
if (this.isCancelled()) {
return null;
}
Set<String> columnHeaderStrs = new HashSet<>();
List<CsvSchema.Column> columnHeaders = new ArrayList<>();
int remainingRowsToSample = 0;
int columnIdx = 0;
for (Node nd: nodesToExport) {
// sample up to 100 rows
if (remainingRowsToSample >= COLUMN_SAMPLING_ROW_NUM) {
break;
}
remainingRowsToSample++;
// Write the header
List<String> headers = new ArrayList<>();
PropertySet[] sets = nodesToExport.iterator().next().getPropertySets();
for(PropertySet set : sets) {
for (Property<?> prop : set.getProperties()) {
if ( ! columnsToSkip.contains(prop.getDisplayName())) {
headers.add(prop.getDisplayName());
for (PropertySet ps: nd.getPropertySets()) {
for (Property prop: ps.getProperties()) {
if (!columnHeaderStrs.contains(prop.getDisplayName()) && !columnsToSkip.contains(prop.getName())) {
columnHeaderStrs.add(prop.getDisplayName());
columnHeaders.add(new CsvSchema.Column(columnIdx, prop.getDisplayName()));
columnIdx++;
}
}
}
br.write(listToCSV(headers));
}
if (this.isCancelled()) {
return null;
}
CsvSchema schema = CsvSchema.builder()
.addColumns(columnHeaders)
.setUseHeader(true)
.setNullValue("")
.build();
CsvMapper mapper = new CsvMapper();
ObjectWriter writer = mapper.writerFor(Map.class).with(schema);
try (SequenceWriter seqWriter = writer.writeValues(outputFile)) {
// Write each line
Iterator<?> nodeIterator = nodesToExport.iterator();
while (nodeIterator.hasNext()) {
if (this.isCancelled()) {
break;
return null;
}
Map<String, Object> rowMap = new HashMap<>();
Node node = (Node)nodeIterator.next();
List<String> values = new ArrayList<>();
sets = node.getPropertySets();
for(PropertySet set : sets) {
for(PropertySet set : node.getPropertySets()) {
for (Property<?> prop : set.getProperties()) {
if ( ! columnsToSkip.contains(prop.getDisplayName())) {
values.add(escapeQuotes(prop.getValue().toString()));
if (!columnsToSkip.contains(prop.getName())) {
rowMap.put(prop.getDisplayName(), prop.getValue());
}
}
}
br.write(listToCSV(values));
seqWriter.write(rowMap);
}
}
return null;
}

View File

@ -167,6 +167,9 @@ class RecentDocumentsByLnk extends Extract {
String fileName = FilenameUtils.getName(normalizePathName);
String filePath = FilenameUtils.getPath(normalizePathName);
List<AbstractFile> sourceFiles;
if (filePath == null) {
return null;
}
try {
sourceFiles = currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, fileName, filePath);
for (AbstractFile sourceFile : sourceFiles) {

View File

@ -10,6 +10,20 @@ The ad hoc keyword search features allows you to run single keyword terms or lis
The \ref keyword_search_page must be selected during ingest before doing an ad hoc keyword search. If you don't want to search for any of the existing keyword lists, you can deselect everything to just index the files for later searching.
\subsection adhoc_limitations Limitations of Ad Hoc Keyword Search
With the release of Autopsy 4.21.0, two types of keyword searching are supported: Solr search with full text indexing and the built-in Autopsy "In-Line" Keyword Search.
Enabling full text indexing with Solr during the ingest process allows for comprehensive ad-hoc manual text searching, encompassing all of the extracted text from files and artifacts.
On the other hand, the In-Line Keyword Search conducts the search during ingest, specifically at the time of text extraction. It only indexes small sections of the files that contain keyword matches (for display purposes). Consequently, unless full text indexing with Solr is enabled, the ad-hoc search will be restricted to these limited sections of the files that had keyword hits. This limitation significantly reduces the amount of searchable text available for ad-hoc searches.
Other situations which will result in not being able to search all of the text extracted from all of the files and artifacts include:
<ul>
<li>If file filtering was used during ingest, resulting in only a subset of files getting ingested. See \ref file_filters for information on file filtering.
<li>If the Autopsy case contains multiple data sources and one or more of those data sources was not indexed during its ingest.
</ul>
\section ad_hoc_kw_types_section Creating Keywords
The following sections will give a description of each keyword type, then will show some sample text and how various search terms would work against it.
@ -36,7 +50,7 @@ Substring match should be used where the search term is just part of a word, or
- "UMP", "oX" will match
- "y dog", "ish-brown" will not match
## Regex match
\subsection regex_match Regex match
Regex match can be used to search for a specific pattern. Regular expressions are supported using Lucene Regex Syntax which is documented here: https://www.elastic.co/guide/en/elasticsearch/reference/1.6/query-dsl-regexp-query.html#regexp-syntax. Wildcards are automatically added to the beginning and end of the regular expressions to ensure all matches are found. Additionally, the resulting hits are split on common token separator boundaries (e.g. space, newline, colon, exclamation point etc.) to make the resulting keyword hit more amenable to highlighting. As of Autopsy 4.9, regex searches are no longer case sensitive. This includes literal characters and character classes.
@ -70,9 +84,12 @@ If you want to override this default behavior:
### Non-Latin text
In general all three types of keyword searches will work as expected but the feature has not been thoroughly tested with all character sets. For example, the searches may no longer be case-insensitive. As with regex above, we suggest testing on a sample file.
### Differences between "In-Line" and Solr regular expression search
It's important to be aware that there might be occasional differences in the results of regular expression searches between the "In-Line" Keyword Search and Solr search. This is because the "In-Line" Keyword Search uses Java regular expressions, while Solr search employs Lucene regular expressions.
\section ad_hoc_kw_search Keyword Search
Individual keyword or regular expressions can quickly be searched using the search text box widget. You can select "Exact Match", "Substring Match" and "Regular Expression" match. See the earlier \ref ad_hoc_kw_types_section section for information on each keyword type. The search can be restricted to only certain data sources by selecting the checkbox near the bottom and then highlighting the data sources to search within. Multiple data sources can be selected used shift+left click or control+left click. The "Save search results" checkbox determines whether the search results will be saved to the case database.
Individual keywords or regular expressions can quickly be searched using the search text box widget. You can select "Exact Match", "Substring Match" and "Regular Expression" match. See the earlier \ref ad_hoc_kw_types_section section for information on each keyword type, as well as \ref adhoc_limitations. The search can be restricted to only certain data sources by selecting the checkbox near the bottom and then highlighting the data sources to search within. Multiple data sources can be selected using shift+left click or control+left click. The "Save search results" checkbox determines whether the search results will be saved to the case database.
\image html keyword-search-bar.PNG
@ -92,14 +109,5 @@ If the "Save search results" checkbox was enabled, the results of the keyword li
\image html keyword-search-list-results.PNG
\section ad_hoc_during_ingest Doing ad hoc searches during ingest
Ad hoc searches are intended to be used after ingest completes, but can be used in a limited capacity while ingest is ongoing.
Manual \ref ad_hoc_kw_search for individual keywords or regular expressions can be executed while ingest is ongoing, using the current index. Note however, that you may miss some results if the entire index has not yet been populated. Autopsy enables you to perform the search on an incomplete index in order to retrieve some preliminary results in real-time.
During the ingest, the normal manual search using \ref ad_hoc_kw_lists behaves differently than after ingest is complete. A selected list can instead be added to the ingest process and it will be searched in the background instead.
Most keyword management features are disabled during ingest. You can not edit keyword lists but can create new lists (but not add to them) and copy and export existing lists.
*/

Binary file not shown.

Before

Width:  |  Height:  |  Size: 52 KiB

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 118 KiB

After

Width:  |  Height:  |  Size: 91 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 59 KiB

After

Width:  |  Height:  |  Size: 59 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 73 KiB

After

Width:  |  Height:  |  Size: 500 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 23 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 101 KiB

After

Width:  |  Height:  |  Size: 113 KiB

View File

@ -5,22 +5,30 @@
\section keyword_module_overview What Does It Do
The Keyword Search module facilitates both the \ref ingest_page "ingest" portion of searching and also supports manual text searching after ingest has completed (see \ref ad_hoc_keyword_search_page). It extracts text from files being ingested, selected reports generated by other modules, and results generated by other modules. This extracted text is then added to a Solr index that can then be searched.
The Keyword Search module facilitates both the \ref ingest_page "ingest" portion of searching and also supports manual text searching after ingest has completed (see \ref ad_hoc_keyword_search_page). It extracts text from files being ingested, selected reports generated by other modules, and results generated by other modules.
Autopsy tries its best to extract the maximum amount of text from the files being indexed. First, the indexing will try to extract text from supported file formats, such as pure text file format, MS Office Documents, PDF files, Email, and many others. If the file is not supported by the standard text extractor, Autopsy will fall back to a string extraction algorithm. String extraction on unknown file formats or arbitrary binary files can often extract a sizeable amount of text from a file, often enough to provide additional clues to reviewers. String extraction will not extract text strings from encrypted files.
Autopsy tries its best to extract the maximum amount of text from the files being indexed. First, it will try to extract text from supported file formats, such as pure text file format, MS Office Documents, PDF files, Email, and many others. If the file is not supported by the standard text extractor, Autopsy will fall back to a string extraction algorithm. String extraction on unknown file formats or arbitrary binary files can often extract a sizeable amount of text from a file, often enough to provide additional clues to reviewers. String extraction will not extract text strings from encrypted files.
Autopsy ships with some built-in lists that define regular expressions and enable the user to search for Phone Numbers, IP addresses, URLs and E-mail addresses. However, enabling some of these very general lists can produce a very large number of hits, and many of them can be false-positives. Regular expressions can potentially take a long time to complete.
Once files are placed in the Solr index, they can be searched quickly for specific keywords, regular expressions, or keyword search lists that can contain a mixture of keywords and regular expressions. Search queries can be executed automatically during the ingest run or at the end of the ingest, depending on the current settings and the time it takes to ingest the image.
Refer to \ref ad_hoc_keyword_search_page for more details on specifying regular expressions and other types of searches.
With the release of Autopsy 4.21.0, two types of keyword searching are supported: Solr search with full text indexing and the built-in Autopsy "In-Line" Keyword Search. For detailed information on configuring the search type, refer to \ref keyword_ingest_settings.
\subsection keyword_SolrSearch Solr Search With Indexing
Full text indexing with Solr provides users with the flexibility to perform ad-hoc manual text searches after the ingest process is complete (see \ref ad_hoc_keyword_search_page). However, it's important to note that full text indexing can significantly slow down the ingest speed for large data sources and cases. Once files are indexed in the Solr index, they can be quickly searched for specific keywords, regular expressions, or a combination of both in keyword search lists.
\subsection keyword_InlineSearch In-Line Keyword Search
On the other hand, the In-Line Keyword Search conducts keyword searches during the ingest process at the time of text extraction. It only indexes small sections of the files that contain keyword matches for display purposes. Our profiling runs indicate that, in most cases, this approach reduces the data source ingest time by half. This means that using the In-Line Keyword Search, a data source can be ingested in approximately half the time it takes to ingest and search the same data source using Solr indexing. However, a drawback of this method is that all search terms must be specified before the ingest begins, and there is no option to perform ad-hoc searches on the entire extracted text after the ingest process is complete.
\section keyword_search_configuration_dialog Keyword Search Configuration Dialog
The keyword search configuration dialog has three tabs, each with its own purpose:
\li The \ref keyword_keywordListsTab is used to add, remove, and modify keyword search lists.
\li The \ref keyword_stringExtractionTab is used to enable language scripts and extraction type.
\li The \ref keyword_generalSettingsTab is used to configure the ingest timings and display information.
\li The \ref keyword_generalSettingsTab is used to configure display information.
\subsection keyword_keywordListsTab Lists tab
@ -57,23 +65,20 @@ The user can also use the String Viewer first and try different script/language
\subsubsection keyword_nsrl NIST NSRL Support
The hash lookup ingest service can be configured to use the NIST NSRL hash set of known files. The keyword search advanced configuration dialog "General" tab contains an option to skip keyword indexing and searching for files that have previously been marked as "known" and uninteresting. Selecting this option can greatly reduce the size of the index and improve ingest performance. In most cases, the user does not need to keyword search for "known" files.
\subsubsection keyword_update_freq Result update frequency during ingest
To control how frequently searches are executed during ingest, the user can adjust the timing setting available in the keyword search advanced configuration dialog "General" tab. Setting the number of minutes lower will result in more frequent index updates and searches being executed, allowing the user to see results closer to real time. However, more frequent updates can affect the overall performance, especially on lower-end systems, and can potentially lengthen the overall time needed for the ingest to complete.
One can also choose to have no periodic searches. This will speed up the ingest. Users choosing this option can run their keyword searches once the entire keyword search index is complete.
\section keyword_usage Using the Module
Search queries can be executed manually by the user at any time, as long as there are some files already indexed and ready to be searched. Searching before indexing is complete will naturally only search indexes that are already compiled.
See \ref ingest_page "Ingest" for more information on ingest in general.
Once there are files in the index, \ref ad_hoc_keyword_search_page will be available for use to manually search at any time.
After the ingest has completed, \ref ad_hoc_keyword_search_page will be available for manual search. The number of files and the amount of text available for Ad Hoc Search depend on the Keyword Search module settings at the time of the ingest. See section \ref adhoc_limitations for details.
\subsection keyword_ingest_settings Ingest Settings
The Ingest Settings for the Keyword Search module allow the user to enable or disable the specific built-in search expressions, Phone Numbers, IP Addresses, Email Addresses, and URLs. Using the Advanced button (covered below), one can add custom keyword groups.
With the release of Autopsy 4.21.0, two types of keyword searching are supported: Solr search with full text indexing and the built-in Autopsy "In-Line" Keyword Search. See \ref keyword_ingest_settings for details regarding search type configuration. See sections \ref keyword_SolrSearch and \ref keyword_InlineSearch for details of each search type.
To select the keyword search type, you can use the "Add text to Solr Index" checkbox. When this checkbox is unchecked, Autopsy will perform the "In-Line" Keyword Search during ingest. However, most of the extracted text will not be indexed by Solr, effectively disabling the functionality described in \ref ad_hoc_keyword_search_page. On the other hand, if the checkbox is selected, Autopsy will perform the "In-Line" Keyword Search during ingest and also add all of the extracted text to the Solr index. This allows you to search the indexed text later using \ref ad_hoc_keyword_search_page.
Additionally, it's important to be aware that there might be occasional differences in the results of regular expression searches between the "In-Line" Keyword Search and Solr search. This is because the "In-Line" Keyword Search uses Java regular expressions, while Solr search employs Lucene regular expressions. You can find more details about this in \ref regex_match.
\image html keyword-search-ingest-settings.PNG
\subsubsection keyword_ocr Optical Character Recognition

View File

@ -300,7 +300,7 @@ class PstParser implements AutoCloseable{
email.setRecipients(toAddress);
email.setCc(ccAddress);
email.setBcc(bccAddress);
email.setSender(getSender(msg.getSenderName(), msg.getSentRepresentingSMTPAddress()));
email.setSender(getSender(msg.getSenderName(), (msg.getSentRepresentingSMTPAddress().isEmpty()) ? msg.getSenderEmailAddress() : msg.getSentRepresentingSMTPAddress()));
email.setSentDate(msg.getMessageDeliveryTime());
email.setTextBody(msg.getBody());
if (false == msg.getTransportMessageHeaders().isEmpty()) {
@ -318,7 +318,7 @@ class PstParser implements AutoCloseable{
email.setSubject(msg.getSubject());
email.setId(msg.getDescriptorNodeId());
email.setMessageID(msg.getInternetMessageId());
String inReplyToID = msg.getInReplyToId();
email.setInReplyToID(inReplyToID);
@ -479,7 +479,7 @@ class PstParser implements AutoCloseable{
} else if (addr.isEmpty()) {
return name;
} else {
return name + ": " + addr;
return name + " <" + addr + ">";
}
}

View File

@ -769,6 +769,14 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
addArtifactAttribute(((id < 0L) ? NbBundle.getMessage(this.getClass(), "ThunderbirdMboxFileIngestModule.notAvail") : String.valueOf(id)),
ATTRIBUTE_TYPE.TSK_MSG_ID, bbattributes);
try {
addArtifactAttribute((email.hasAttachment() ? "Yes" : ""),
blackboard.getOrAddAttributeType("EMAIL_HAS_ATTACHMENT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Has Attachments"),
bbattributes);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to create EMAIL_HAS_ATTACHMENT attribute" , ex); //NON-NLS
}
addArtifactAttribute(((localPath.isEmpty() == false) ? localPath : ""),
ATTRIBUTE_TYPE.TSK_PATH, bbattributes);