Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-12 07:56:16 +00:00)
Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 4568-DataSourceSummary
Commit a26d0dbf70
@@ -240,4 +240,4 @@ ImageFilePanel.sha1HashTextField.text=
ImageFilePanel.md5HashTextField.text=
ImageFilePanel.errorLabel.text=Error Label
ImageFilePanel.hashValuesNoteLabel.text=NOTE: These values will not be validated when the data source is added.
ImageFilePanel.hashValuesLabel.text=Hash Values (optional):
ImageFilePanel.hashValuesLabel.text=Hash Values (optional):
@@ -200,4 +200,4 @@ LogicalEvidenceFilePanel.errorLabel.text=\u30a8\u30e9\u30fc\u30e9\u30d9\u30eb
LogicalEvidenceFilePanel.logicalEvidenceFileChooser.dialogTitle=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u307e\u305f\u306f\u30d5\u30a9\u30eb\u30c0\u3092\u9078\u629e
LogicalEvidenceFilePanel.logicalEvidenceFileChooser.approveButtonText=\u9078\u629e
LocalDiskSelectionDialog.errorLabel.text=\u30a8\u30e9\u30fc\u30e9\u30d9\u30eb
LocalDiskSelectionDialog.selectLocalDiskLabel.text=\u30ed\u30fc\u30ab\u30eb\u30c7\u30a3\u30b9\u30af\u3092\u9078\u629e\uff1a
LocalDiskSelectionDialog.selectLocalDiskLabel.text=\u30ed\u30fc\u30ab\u30eb\u30c7\u30a3\u30b9\u30af\u3092\u9078\u629e\uff1a
@@ -676,4 +676,4 @@
</SubComponents>
</Container>
</SubComponents>
</Form>
</Form>
@@ -85,9 +85,6 @@ final class CasePropertiesPanel extends javax.swing.JPanel {
EamDb dbManager = EamDb.getInstance();
if (dbManager != null) {
CorrelationCase correlationCase = dbManager.getCase(theCase);
if (null == correlationCase) {
correlationCase = dbManager.newCase(theCase);
}
currentOrg = correlationCase.getOrg();
}
} catch (EamDbException ex) {
@@ -540,4 +537,4 @@ final class CasePropertiesPanel extends javax.swing.JPanel {
private javax.swing.JTextArea taNotesText;
// End of variables declaration//GEN-END:variables

}
}
@@ -20,6 +20,8 @@ package org.sleuthkit.autopsy.casemodule;

import java.awt.Window;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
@@ -423,6 +425,22 @@ final class LocalDiskSelectionDialog extends JDialog {
if (disks.size() > 0) {
localDiskTable.setEnabled(true);
localDiskTable.clearSelection();

// Remove the partition the application is running on.
String userConfigPath = PlatformUtil.getUserConfigDirectory();
for (Iterator<LocalDisk> iterator = disks.iterator(); iterator.hasNext();) {
LocalDisk disk = iterator.next();
String diskPath = disk.getPath();
if (diskPath.startsWith("\\\\.\\")) {
// Strip out UNC prefix to get the drive letter.
diskPath = diskPath.substring(4);
}
if (userConfigPath.startsWith(diskPath)) {
iterator.remove();
}
}

Collections.sort(disks, (LocalDisk disk1, LocalDisk disk2) -> disk1.getName().compareToIgnoreCase(disk2.getName()));
}
fireUpdateEvent();
ready = true;
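For context, a self-contained sketch of the drive-filtering and sorting behavior introduced in the hunk above; the Disk class, sample paths, and config directory here are simplified stand-ins for Autopsy's LocalDisk and PlatformUtil, not the real classes:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Minimal stand-in for Autopsy's LocalDisk, for illustration only.
class Disk {
    private final String name;
    private final String path;
    Disk(String name, String path) { this.name = name; this.path = path; }
    String getName() { return name; }
    String getPath() { return path; }
}

public class DiskFilterSketch {
    public static void main(String[] args) {
        List<Disk> disks = new ArrayList<>();
        disks.add(new Disk("Drive D", "\\\\.\\D:"));
        disks.add(new Disk("Drive C", "\\\\.\\C:"));

        // Assume the user config directory lives on C: (hypothetical value).
        String userConfigPath = "C:\\Users\\examiner\\AppData\\Roaming\\autopsy";

        // Remove the partition the application is running on.
        for (Iterator<Disk> iterator = disks.iterator(); iterator.hasNext();) {
            Disk disk = iterator.next();
            String diskPath = disk.getPath();
            if (diskPath.startsWith("\\\\.\\")) {
                // Strip the UNC prefix to get the drive letter.
                diskPath = diskPath.substring(4);
            }
            if (userConfigPath.startsWith(diskPath)) {
                iterator.remove();
            }
        }

        // Sort the remaining disks by name, ignoring case.
        disks.sort((d1, d2) -> d1.getName().compareToIgnoreCase(d2.getName()));

        disks.forEach(d -> System.out.println(d.getName())); // prints: Drive D
    }
}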
@@ -92,9 +92,6 @@ final class NewCaseWizardAction extends CallableSystemAction {
EamDb dbManager = EamDb.getInstance();
if (dbManager != null) {
CorrelationCase cRCase = dbManager.getCase(Case.getCurrentCaseThrows());
if (cRCase == null) {
cRCase = dbManager.newCase(Case.getCurrentCaseThrows());
}
if (!organizationName.isEmpty()) {
for (EamOrganization org : dbManager.getOrganizations()) {
if (org.getName().equals(organizationName)) {
@@ -521,4 +521,4 @@
</SubComponents>
</Container>
</SubComponents>
</Form>
</Form>
@@ -634,4 +634,4 @@ final class OptionalCasePropertiesPanel extends javax.swing.JPanel {
private javax.swing.JTextField tfExaminerEmailText;
private javax.swing.JTextField tfExaminerPhoneText;
// End of variables declaration//GEN-END:variables
}
}
@@ -1675,9 +1675,6 @@ abstract class AbstractSqlEamDb implements EamDb {
// in the database, but we don't expect the user to be tagging large numbers
// of items (that didn't have the CE ingest module run on them) at once.
CorrelationCase correlationCaseWithId = getCaseByUUID(eamArtifact.getCorrelationCase().getCaseUUID());
if (null == correlationCaseWithId) {
correlationCaseWithId = newCase(eamArtifact.getCorrelationCase());
}
if (null == getDataSource(correlationCaseWithId, eamArtifact.getCorrelationDataSource().getDataSourceObjectID())) {
newDataSource(eamArtifact.getCorrelationDataSource());
}
@@ -200,9 +200,6 @@ public class EamArtifactUtil {

// make an instance for the BB source file
CorrelationCase correlationCase = EamDb.getInstance().getCase(Case.getCurrentCaseThrows());
if (null == correlationCase) {
correlationCase = EamDb.getInstance().newCase(Case.getCurrentCaseThrows());
}
return new CorrelationAttributeInstance(
correlationType,
value,
@@ -322,9 +319,6 @@ public class EamArtifactUtil {
CorrelationAttributeInstance.Type filesType = EamDb.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID);

CorrelationCase correlationCase = EamDb.getInstance().getCase(Case.getCurrentCaseThrows());
if (null == correlationCase) {
correlationCase = EamDb.getInstance().newCase(Case.getCurrentCaseThrows());
}
return new CorrelationAttributeInstance(
filesType,
af.getMd5Hash(),
@@ -444,9 +444,6 @@ final class CaseEventListener implements PropertyChangeListener {

try {
CorrelationCase correlationCase = dbManager.getCase(openCase);
if (null == correlationCase) {
correlationCase = dbManager.newCase(openCase);
}
if (null == dbManager.getDataSource(correlationCase, newDataSource.getId())) {
CorrelationDataSource.fromTSKDataSource(correlationCase, newDataSource);
}
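Several of the hunks above (CasePropertiesPanel, NewCaseWizardAction, AbstractSqlEamDb, EamArtifactUtil, and this listener) repeat the same get-or-create pattern against the central repository. A minimal in-memory sketch of that idiom; the map-backed repository below is hypothetical and stands in for EamDb, it is not Autopsy's implementation:

import java.util.HashMap;
import java.util.Map;

public class GetOrCreateSketch {
    private final Map<String, String> casesByUuid = new HashMap<>();

    String getCase(String caseUuid) {
        return casesByUuid.get(caseUuid); // null when the case is not yet known to the repository
    }

    String newCase(String caseUuid) {
        casesByUuid.put(caseUuid, "correlation-case-for-" + caseUuid);
        return casesByUuid.get(caseUuid);
    }

    String getOrCreate(String caseUuid) {
        String correlationCase = getCase(caseUuid);
        if (correlationCase == null) {
            correlationCase = newCase(caseUuid); // register it on first use
        }
        return correlationCase;
    }

    public static void main(String[] args) {
        GetOrCreateSketch repo = new GetOrCreateSketch();
        System.out.println(repo.getOrCreate("case-uuid-1")); // created on first call
        System.out.println(repo.getOrCreate("case-uuid-1")); // returned from the map afterwards
    }
}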
@@ -297,16 +297,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
} catch (EamDbException ex) {
throw new IngestModuleException("Unable to get case from central repository database ", ex);
}
if (eamCase == null) {
// ensure we have this case defined in the EAM DB
try {
eamCase = centralRepoDb.newCase(autopsyCase);
} catch (EamDbException ex) {
logger.log(Level.SEVERE, "Error creating new case in ingest module start up.", ex); // NON-NLS
throw new IngestModuleException("Error creating new case in ingest module start up.", ex); // NON-NLS
}
}


try {
eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource());
} catch (EamDbException ex) {
@@ -24,6 +24,7 @@ import java.util.concurrent.CancellationException;
import java.util.logging.Level;
import java.awt.Dimension;
import java.awt.Point;
import java.util.Collections;
import javax.swing.SwingWorker;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
@@ -388,10 +389,14 @@ class SelectDriveDialog extends javax.swing.JDialog {
loadingDisks = false;
disks = new ArrayList<>();
disks.addAll(partitions);

if (disks.size() > 0) {
diskTable.setEnabled(true);
diskTable.clearSelection();

Collections.sort(disks, (LocalDisk disk1, LocalDisk disk2) -> disk1.getName().compareToIgnoreCase(disk2.getName()));
}

ready = true;
}
}
@@ -193,11 +193,6 @@ ReportHTML.writeSum.warningMsg=<span>Warning, this report was run before ingest
# examiner as a regex signature to skip report.html and summary.html
#
ReportHTML.writeSum.reportGenOn.text=HTML Report Generated on {0}
ReportHTML.writeSum.caseName=Case\:
ReportHTML.writeSum.caseNum=Case Number\:
ReportHTML.writeSum.examiner=Examiner\:
ReportHTML.writeSum.noExaminer=<i>No examiner</i>
ReportHTML.writeSum.numImages=Number of Images\:
ReportHTML.writeSum.imageInfoHeading=<h2>Image Information\:</h2>
ReportHTML.writeSum.softwareInfoHeading=<h2>Software Information\:</h2>
ReportHTML.writeSum.ingestHistoryHeading=<h2>Ingest History\:</h2>
@@ -225,7 +220,6 @@ ReportWizardPanel1.nextButton.text=Next >
ReportWizardPanel1.finishButton.text=Finish
ReportWizardPanel2.finishButton.text=Finish
ReportWizardPanel2.nextButton.text=Next >
ReportHTML.writeSum.noCaseNum=<i>No case number</i>
ReportBodyFile.generateReport.srcModuleName.text=TSK Body File
ReportExcel.endReport.srcModuleName.text=Excel Report
ReportHTML.writeIndex.srcModuleName.text=HTML Report
@@ -26,6 +26,7 @@ import java.util.logging.Level;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
@@ -302,6 +303,15 @@ class ReportExcel implements TableReportModule {
return text.replaceAll("[\\/\\:\\?\\*\\\\]", "_");
}

@Messages({
"ReportExcel.writeSummary.sheetName=Summary",
"ReportExcel.writeSummary.summary=Summary",
"ReportExcel.writeSummary.caseName=Case Name:",
"ReportExcel.writeSummary.numImages=Number of Images:",
"ReportExcel.writeSummary.caseNum=Case Number:",
"ReportExcel.writeSummary.caseNotes=Case Notes:",
"ReportExcel.writeSummary.examiner=Examiner:"
})
private void writeSummaryWorksheet() {
Case currentCase;
try {
@@ -310,12 +320,12 @@ class ReportExcel implements TableReportModule {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return;
}
sheet = wb.createSheet(NbBundle.getMessage(this.getClass(), "ReportExcel.sheetName.text"));
sheet = wb.createSheet(Bundle.ReportExcel_writeSummary_sheetName());
rowIndex = 0;

Row row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(NbBundle.getMessage(this.getClass(), "ReportExcel.cellVal.summary"));
row.createCell(0).setCellValue(Bundle.ReportExcel_writeSummary_summary());
++rowIndex;

sheet.createRow(rowIndex);
@@ -323,25 +333,21 @@

row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(NbBundle.getMessage(this.getClass(), "ReportExcel.cellVal.caseName"));
row.createCell(0).setCellValue(Bundle.ReportExcel_writeSummary_caseName());
row.createCell(1).setCellValue(currentCase.getDisplayName());
++rowIndex;

row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(NbBundle.getMessage(this.getClass(), "ReportExcel.cellVal.caseNum"));
row.createCell(1).setCellValue(currentCase.getNumber());
++rowIndex;
if (!currentCase.getNumber().isEmpty()) {
row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(Bundle.ReportExcel_writeSummary_caseNum());
row.createCell(1).setCellValue(currentCase.getNumber());
++rowIndex;
}

row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(NbBundle.getMessage(this.getClass(), "ReportExcel.cellVal.examiner"));
row.createCell(1).setCellValue(currentCase.getExaminer());
++rowIndex;

row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(NbBundle.getMessage(this.getClass(), "ReportExcel.cellVal.numImages"));
row.createCell(0).setCellValue(Bundle.ReportExcel_writeSummary_numImages());
int numImages;
try {
numImages = currentCase.getDataSources().size();
@@ -351,6 +357,22 @@
row.createCell(1).setCellValue(numImages);
++rowIndex;

if (!currentCase.getCaseNotes().isEmpty()) {
row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(Bundle.ReportExcel_writeSummary_caseNotes());
row.createCell(1).setCellValue(currentCase.getCaseNotes());
++rowIndex;
}

if (!currentCase.getExaminer().isEmpty()) {
row = sheet.createRow(rowIndex);
row.setRowStyle(setStyle);
row.createCell(0).setCellValue(Bundle.ReportExcel_writeSummary_examiner());
row.createCell(1).setCellValue(currentCase.getExaminer());
++rowIndex;
}

sheet.autoSizeColumn(0);
sheet.autoSizeColumn(1);
}
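The replacement above of NbBundle.getMessage(...) string keys with Bundle.ReportExcel_writeSummary_*() calls relies on the NetBeans @Messages annotation, whose processor generates a Bundle class at build time with method names formed from the keys by replacing dots with underscores. A minimal sketch of that pattern; the class name below is hypothetical and the key is one declared in the hunk above:

import org.openide.util.NbBundle.Messages;

class MessagesSketch {

    @Messages({
        "ReportExcel.writeSummary.caseNotes=Case Notes:"
    })
    String caseNotesLabel() {
        // The annotation processor generates Bundle.ReportExcel_writeSummary_caseNotes()
        // at compile time; dots in the key become underscores in the method name.
        return Bundle.ReportExcel_writeSummary_caseNotes();
    }
}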
@@ -51,10 +51,15 @@ import javax.swing.JPanel;
import org.apache.commons.lang3.StringEscapeUtils;
import org.openide.filesystems.FileUtil;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Services;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamOrganization;
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
@@ -1226,6 +1231,13 @@ class ReportHTML implements TableReportModule {
}
}

@Messages({
"ReportHTML.writeSum.case=Case:",
"ReportHTML.writeSum.caseNumber=Case Number:",
"ReportHTML.writeSum.caseNumImages=Number of Images:",
"ReportHTML.writeSum.caseNotes=Notes:",
"ReportHTML.writeSum.examiner=Examiner:"
})
/**
* Write the case details section of the summary for this report.
*
@@ -1233,16 +1245,24 @@
*/
private StringBuilder writeSummaryCaseDetails() {
StringBuilder summary = new StringBuilder();

final boolean agencyLogoSet = reportBranding.getAgencyLogoPath() != null && !reportBranding.getAgencyLogoPath().isEmpty();

// Case
String caseName = currentCase.getDisplayName();
String caseNumber = currentCase.getNumber();
String examiner = currentCase.getExaminer();
final boolean agencyLogoSet = reportBranding.getAgencyLogoPath() != null && !reportBranding.getAgencyLogoPath().isEmpty();
int imagecount;
try {
imagecount = currentCase.getDataSources().size();
} catch (TskCoreException ex) {
imagecount = 0;
}
String caseNotes = currentCase.getCaseNotes();

// Examiner
String examinerName = currentCase.getExaminer();

// Start the layout.
summary.append("<div class=\"title\">\n"); //NON-NLS
if (agencyLogoSet) {
summary.append("<div class=\"left\">\n"); //NON-NLS
@@ -1254,17 +1274,31 @@
final String align = agencyLogoSet ? "right" : "left"; //NON-NLS NON-NLS
summary.append("<div class=\"").append(align).append("\">\n"); //NON-NLS
summary.append("<table>\n"); //NON-NLS
summary.append("<tr><td>").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.caseName")) //NON-NLS
.append("</td><td>").append(caseName).append("</td></tr>\n"); //NON-NLS NON-NLS
summary.append("<tr><td>").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.caseNum")) //NON-NLS
.append("</td><td>").append(!caseNumber.isEmpty() ? caseNumber : NbBundle //NON-NLS
.getMessage(this.getClass(), "ReportHTML.writeSum.noCaseNum")).append("</td></tr>\n"); //NON-NLS
summary.append("<tr><td>").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.examiner")).append("</td><td>") //NON-NLS
.append(!examiner.isEmpty() ? examiner : NbBundle
.getMessage(this.getClass(), "ReportHTML.writeSum.noExaminer"))
.append("</td></tr>\n"); //NON-NLS
summary.append("<tr><td>").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.numImages")) //NON-NLS
.append("</td><td>").append(imagecount).append("</td></tr>\n"); //NON-NLS

// Case details
summary.append("<tr><td>").append(Bundle.ReportHTML_writeSum_case()).append("</td><td>") //NON-NLS
.append(formatHtmlString(caseName)).append("</td></tr>\n"); //NON-NLS

if (!caseNumber.isEmpty()) {
summary.append("<tr><td>").append(Bundle.ReportHTML_writeSum_caseNumber()).append("</td><td>") //NON-NLS
.append(formatHtmlString(caseNumber)).append("</td></tr>\n"); //NON-NLS
}

summary.append("<tr><td>").append(Bundle.ReportHTML_writeSum_caseNumImages()).append("</td><td>") //NON-NLS
.append(imagecount).append("</td></tr>\n"); //NON-NLS

if (!caseNotes.isEmpty()) {
summary.append("<tr><td>").append(Bundle.ReportHTML_writeSum_caseNotes()).append("</td><td>") //NON-NLS
.append(formatHtmlString(caseNotes)).append("</td></tr>\n"); //NON-NLS
}

// Examiner details
if (!examinerName.isEmpty()) {
summary.append("<tr><td>").append(Bundle.ReportHTML_writeSum_examiner()).append("</td><td>") //NON-NLS
.append(formatHtmlString(examinerName)).append("</td></tr>\n"); //NON-NLS
}

// End the layout.
summary.append("</table>\n"); //NON-NLS
summary.append("</div>\n"); //NON-NLS
summary.append("<div class=\"clear\"></div>\n"); //NON-NLS
@@ -1414,4 +1448,17 @@
+ thumbFile.getName();
}

/**
* Apply escape sequence to special characters. Line feed and carriage
* return character combinations will be converted to HTML line breaks.
*
* @param text The text to format.
*
* @return The formatted text.
*/
private String formatHtmlString(String text) {
String formattedString = StringEscapeUtils.escapeHtml4(text);
return formattedString.replaceAll("(\r\n|\r|\n|\n\r)", "<br>");
}

}
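For reference, a small standalone demonstration of what the formatHtmlString() helper added above does, assuming Apache Commons Lang's StringEscapeUtils (the class imported earlier in this commit); the sample input is hypothetical:

import org.apache.commons.lang3.StringEscapeUtils;

public class EscapeSketch {
    static String formatHtmlString(String text) {
        // Escape HTML special characters, then turn line breaks into <br> tags.
        String formattedString = StringEscapeUtils.escapeHtml4(text);
        return formattedString.replaceAll("(\r\n|\r|\n|\n\r)", "<br>");
    }

    public static void main(String[] args) {
        String notes = "Case notes with <b>markup</b> & a line\r\nbreak";
        System.out.println(formatHtmlString(notes));
        // prints: Case notes with &lt;b&gt;markup&lt;/b&gt; &amp; a line<br>break
    }
}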
@@ -1,241 +1,251 @@
|
||||
///*
|
||||
// *
|
||||
// * Autopsy Forensic Browser
|
||||
// *
|
||||
// * Copyright 2018 Basis Technology Corp.
|
||||
// * Contact: carrier <at> sleuthkit <dot> org
|
||||
// *
|
||||
// * Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// * you may not use this file except in compliance with the License.
|
||||
// * You may obtain a copy of the License at
|
||||
// *
|
||||
// * http://www.apache.org/licenses/LICENSE-2.0
|
||||
// *
|
||||
// * Unless required by applicable law or agreed to in writing, software
|
||||
// * distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// * See the License for the specific language governing permissions and
|
||||
// * limitations under the License.
|
||||
// */
|
||||
//package org.sleuthkit.autopsy.commonfilessearch;
|
||||
//
|
||||
//import java.nio.file.Path;
|
||||
//import java.sql.SQLException;
|
||||
//import junit.framework.Test;
|
||||
//import org.netbeans.junit.NbModuleSuite;
|
||||
//import org.netbeans.junit.NbTestCase;
|
||||
//import org.openide.util.Exceptions;
|
||||
//import junit.framework.Assert;
|
||||
//import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
//import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
|
||||
//import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
|
||||
//import org.sleuthkit.autopsy.commonfilesearch.AbstractCommonAttributeSearcher;
|
||||
//import org.sleuthkit.autopsy.commonfilesearch.AllInterCaseCommonAttributeSearcher;
|
||||
//import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeCountSearchResults;
|
||||
//import org.sleuthkit.autopsy.commonfilesearch.SingleInterCaseCommonAttributeSearcher;
|
||||
//import static org.sleuthkit.autopsy.commonfilessearch.InterCaseTestUtils.*;
|
||||
//import org.sleuthkit.datamodel.TskCoreException;
|
||||
//
|
||||
///**
|
||||
// * Tests with case 3 as the current case.
|
||||
// *
|
||||
// * If I use the search all cases option: One node for Hash A (1_1_A.jpg,
|
||||
// * 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case 1: One node for
|
||||
// * Hash A (1_1_A.jpg, 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case
|
||||
// * 2: No matches If I only search in the current case (existing mode), allowing
|
||||
// * all data sources: One node for Hash C (3_1_C.jpg, 3_2_C.jpg)
|
||||
// *
|
||||
// */
|
||||
//public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase {
|
||||
//
|
||||
// private final InterCaseTestUtils utils;
|
||||
//
|
||||
// public static Test suite() {
|
||||
// NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(IngestedWithHashAndFileTypeInterCaseTests.class).
|
||||
// clusters(".*").
|
||||
// enableModules(".*");
|
||||
// return conf.suite();
|
||||
// }
|
||||
//
|
||||
// public IngestedWithHashAndFileTypeInterCaseTests(String name) {
|
||||
// super(name);
|
||||
// this.utils = new InterCaseTestUtils(this);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public void setUp() {
|
||||
// this.utils.clearTestDir();
|
||||
// try {
|
||||
// this.utils.enableCentralRepo();
|
||||
//
|
||||
// String[] cases = new String[]{
|
||||
// CASE1,
|
||||
// CASE2,
|
||||
// CASE3};
|
||||
//
|
||||
// Path[][] paths = {
|
||||
// {this.utils.case1DataSet1Path, this.utils.case1DataSet2Path},
|
||||
// {this.utils.case2DataSet1Path, this.utils.case2DataSet2Path},
|
||||
// {this.utils.case3DataSet1Path, this.utils.case3DataSet2Path}};
|
||||
//
|
||||
// this.utils.createCases(cases, paths, this.utils.getIngestSettingsForHashAndFileType(), InterCaseTestUtils.CASE3);
|
||||
// } catch (TskCoreException | EamDbException ex) {
|
||||
// Exceptions.printStackTrace(ex);
|
||||
// Assert.fail(ex.getMessage());
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public void tearDown() {
|
||||
// this.utils.clearTestDir();
|
||||
// this.utils.tearDown();
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * Search All cases with no file type filtering.
|
||||
// */
|
||||
// public void testOne() {
|
||||
// try {
|
||||
// AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0);
|
||||
// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
|
||||
//
|
||||
// assertTrue("Results should not be empty", metadata.size() != 0);
|
||||
//
|
||||
// //case 1 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
|
||||
//
|
||||
// //case 1 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
|
||||
//
|
||||
// //case 2 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
|
||||
//
|
||||
// //case 2 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
|
||||
//
|
||||
// //case 3 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
|
||||
//
|
||||
// //case 3 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
|
||||
//
|
||||
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
|
||||
// Exceptions.printStackTrace(ex);
|
||||
// Assert.fail(ex.getMessage());
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * Search All cases with no file type filtering.
|
||||
// */
|
||||
// public void testTwo() {
|
||||
// try {
|
||||
// int matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(CASE2);
|
||||
// CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
|
||||
// AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0);
|
||||
//
|
||||
// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
|
||||
//
|
||||
// assertTrue("Results should not be empty", metadata.size() != 0);
|
||||
//
|
||||
// //case 1 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
|
||||
//
|
||||
// //case 1 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
|
||||
//
|
||||
// //case 2 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
|
||||
//
|
||||
// //case 2 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
|
||||
//
|
||||
// //case 3 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
|
||||
//
|
||||
// //case 3 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
|
||||
//
|
||||
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
|
||||
// Exceptions.printStackTrace(ex);
|
||||
// Assert.fail(ex.getMessage());
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * We should be able to observe that certain files are no longer returned in
|
||||
// * the result set since they exist too frequently
|
||||
// */
|
||||
// public void testThree() {
|
||||
// try {
|
||||
//
|
||||
// CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
|
||||
// AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50);
|
||||
//
|
||||
// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
|
||||
// metadata.filterMetadata();
|
||||
// assertTrue("Results should not be empty", metadata.size() != 0);
|
||||
//
|
||||
// //case 1 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
|
||||
//
|
||||
// //case 1 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
|
||||
//
|
||||
// //case 2 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
|
||||
//
|
||||
// //case 2 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
|
||||
//
|
||||
// //case 3 data set 1
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
|
||||
//
|
||||
// //case 3 data set 2
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
|
||||
// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
|
||||
//
|
||||
// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
|
||||
// Exceptions.printStackTrace(ex);
|
||||
// Assert.fail(ex.getMessage());
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
/*
|
||||
*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.commonfilessearch;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Set;
|
||||
import junit.framework.Test;
|
||||
import org.netbeans.junit.NbModuleSuite;
|
||||
import org.netbeans.junit.NbTestCase;
|
||||
import org.openide.util.Exceptions;
|
||||
import junit.framework.Assert;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
|
||||
import org.sleuthkit.autopsy.commonfilesearch.AbstractCommonAttributeSearcher;
|
||||
import org.sleuthkit.autopsy.commonfilesearch.AllInterCaseCommonAttributeSearcher;
|
||||
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeCountSearchResults;
|
||||
import org.sleuthkit.autopsy.commonfilesearch.SingleInterCaseCommonAttributeSearcher;
|
||||
import static org.sleuthkit.autopsy.commonfilessearch.InterCaseTestUtils.*;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
* Tests with case 3 as the current case.
|
||||
*
|
||||
* If I use the search all cases option: One node for Hash A (1_1_A.jpg,
|
||||
* 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case 1: One node for
|
||||
* Hash A (1_1_A.jpg, 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case
|
||||
* 2: No matches If I only search in the current case (existing mode), allowing
|
||||
* all data sources: One node for Hash C (3_1_C.jpg, 3_2_C.jpg)
|
||||
*
|
||||
*/
|
||||
public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase {
|
||||
|
||||
private final InterCaseTestUtils utils;
|
||||
|
||||
public static Test suite() {
|
||||
NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(IngestedWithHashAndFileTypeInterCaseTests.class).
|
||||
clusters(".*").
|
||||
enableModules(".*");
|
||||
return conf.suite();
|
||||
}
|
||||
|
||||
public IngestedWithHashAndFileTypeInterCaseTests(String name) {
|
||||
super(name);
|
||||
this.utils = new InterCaseTestUtils(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setUp() {
|
||||
this.utils.clearTestDir();
|
||||
try {
|
||||
this.utils.enableCentralRepo();
|
||||
|
||||
String[] cases = new String[]{
|
||||
CASE1,
|
||||
CASE2,
|
||||
CASE3};
|
||||
|
||||
Path[][] paths = {
|
||||
{this.utils.case1DataSet1Path, this.utils.case1DataSet2Path},
|
||||
{this.utils.case2DataSet1Path, this.utils.case2DataSet2Path},
|
||||
{this.utils.case3DataSet1Path, this.utils.case3DataSet2Path}};
|
||||
|
||||
this.utils.createCases(cases, paths, this.utils.getIngestSettingsForHashAndFileType(), InterCaseTestUtils.CASE3);
|
||||
} catch (TskCoreException | EamDbException ex) {
|
||||
Exceptions.printStackTrace(ex);
|
||||
Assert.fail(ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void tearDown() {
|
||||
this.utils.clearTestDir();
|
||||
this.utils.tearDown();
|
||||
}
|
||||
|
||||
/**
|
||||
* Search All cases with no file type filtering.
|
||||
*/
|
||||
public void testOne() {
|
||||
try {
|
||||
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0);
|
||||
CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
|
||||
|
||||
assertTrue("Results should not be empty", metadata.size() != 0);
|
||||
|
||||
//case 1 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
|
||||
|
||||
//case 1 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
|
||||
|
||||
//case 2 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
|
||||
|
||||
//case 2 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
|
||||
|
||||
//case 3 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
|
||||
|
||||
//case 3 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
|
||||
|
||||
} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
|
||||
Exceptions.printStackTrace(ex);
|
||||
Assert.fail(ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search All cases with no file type filtering.
|
||||
*/
|
||||
public void testTwo() {
|
||||
try {
|
||||
int matchesMustAlsoBeFoundInThisCase = 0;
|
||||
|
||||
// Filter out the time stamp to get the correct case name.
|
||||
Set<String> caseNames = this.utils.getCaseMap().keySet();
|
||||
for (String caseName : caseNames) {
|
||||
if (caseName.substring(0, caseName.length() - 20).equalsIgnoreCase(CASE2)) {
|
||||
// Case match found. Get the number of matches.
|
||||
matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(caseName);
|
||||
}
|
||||
}
|
||||
CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
|
||||
AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0);
|
||||
|
||||
CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
|
||||
|
||||
assertTrue("Results should not be empty", metadata.size() != 0);
|
||||
|
||||
//case 1 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
|
||||
|
||||
//case 1 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
|
||||
|
||||
//case 2 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
|
||||
|
||||
//case 2 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
|
||||
|
||||
//case 3 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
|
||||
|
||||
//case 3 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
|
||||
|
||||
} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
|
||||
Exceptions.printStackTrace(ex);
|
||||
Assert.fail(ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* We should be able to observe that certain files are no longer returned in
|
||||
* the result set since they exist too frequently
|
||||
*/
|
||||
public void testThree() {
|
||||
try {
|
||||
|
||||
CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
|
||||
AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50);
|
||||
|
||||
CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
|
||||
metadata.filterMetadata();
|
||||
assertTrue("Results should not be empty", metadata.size() != 0);
|
||||
|
||||
//case 1 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
|
||||
|
||||
//case 1 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
|
||||
|
||||
//case 2 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
|
||||
|
||||
//case 2 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
|
||||
|
||||
//case 3 data set 1
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
|
||||
|
||||
//case 3 data set 2
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
|
||||
assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
|
||||
|
||||
} catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
|
||||
Exceptions.printStackTrace(ex);
|
||||
Assert.fail(ex.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -59,6 +59,7 @@ import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeCountSearchResults;
import org.sleuthkit.autopsy.datamodel.utils.DataSourceLoader;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValue;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValueList;
import org.sleuthkit.autopsy.coreutils.TimeStampUtils;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.modules.dataSourceIntegrity.DataSourceIntegrityModuleFactory;
import org.sleuthkit.autopsy.modules.embeddedfileextractor.EmbeddedFileExtractorModuleFactory;
@@ -426,7 +427,7 @@ class InterCaseTestUtils {

boolean sameFileName = testFileName.equalsIgnoreCase(fileName);
boolean sameDataSource = testDataSource.equalsIgnoreCase(dataSource);
boolean sameCrCase = testCaseName.equalsIgnoreCase(crCase);
boolean sameCrCase = TimeStampUtils.removeTimeStamp(testCaseName).equalsIgnoreCase(crCase);

if (sameFileName && sameDataSource && sameCrCase) {
tally++;
@@ -443,7 +444,7 @@ class InterCaseTestUtils {
final String testDataSource = node.getDataSource();

boolean sameFileName = testFileName.equalsIgnoreCase(fileName);
boolean sameCaseName = testCaseName.equalsIgnoreCase(crCase);
boolean sameCaseName = TimeStampUtils.removeTimeStamp(testCaseName).equalsIgnoreCase(crCase);
boolean sameDataSource = testDataSource.equalsIgnoreCase(dataSource);

if (sameFileName && sameDataSource && sameCaseName) {