Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 10:17:41 +00:00
Creating a test input file and running Logical Files DSP on it
This commit is contained in:
parent 041e5172f8
commit 583c79b845
@@ -676,6 +676,11 @@ public final class MultiUserSettingsPanel extends javax.swing.JPanel {
    }//GEN-LAST:event_bnTestSolrActionPerformed

    private void bnTestMultiUserActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnTestMultiUserActionPerformed
        // Save the configuration that the user has entered in the other fields (i.e. Solr, Postgres, ActiveMQ).
        // It needs to be stored in the settings file in order to create a case and connect to the database/KWS.
        store();

        String resultsFolderPath = getNormalizedFolderPath(outputPathTextField.getText().trim());
        String testResult = MultiUserTestTool.runTest(resultsFolderPath);
        if (testResult.equals(MultiUserTestTool.RESULT_SUCCESS)) {

@@ -19,17 +19,28 @@
package org.sleuthkit.autopsy.corecomponents;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.UUID;
import java.util.logging.Level;
import org.openide.util.Exceptions;
import org.apache.commons.io.FileUtils;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.CaseActionException;
import org.sleuthkit.autopsy.casemodule.CaseDetails;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
import static org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.TimeStampUtils;
import org.sleuthkit.autopsy.datasourceprocessors.AddDataSourceCallback;
import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSource;
import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.casemodule.LocalFilesDSProcessor;

/**
 * Test tool that creates a multi user case, database, KWS index, runs ingest,

@@ -43,6 +54,8 @@ class MultiUserTestTool {

    static final String RESULT_SUCCESS = "Success";

    private static final Object INGEST_LOCK = new Object();
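    // The data source processor callback notifies this lock, letting runLogicalFilesDSP() block until the test data source has been added.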

    static String runTest(String rootOutputDirectory) {

        // 1 (MH) Creates a case in the output folder. Could be a hard-coded name plus a time stamp.
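The case-creation code referenced by step 1 falls outside this hunk. A minimal sketch of what such a helper might look like, assuming Autopsy's Case.createAsCurrentCase, CaseDetails, and TimeStampUtils APIs; the helper name and signature here are illustrative, not the commit's actual code:

    // Illustrative sketch (not part of this commit): create a multi-user case
    // in the output folder using a base name plus a time stamp.
    private static Case createCase(String baseCaseName, String rootOutputDirectory) throws CaseActionException {
        String caseDirectoryPath = rootOutputDirectory + File.separator + baseCaseName + "_" + TimeStampUtils.createTimeStamp();
        Case.createAsCurrentCase(Case.CaseType.MULTI_USER_CASE, caseDirectoryPath, new CaseDetails(baseCaseName));
        return Case.getCurrentCase();
    }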

@@ -83,10 +96,29 @@
        }

        // 3 (NTH) Makes a text file in a temp folder with just the text "Test" in it.
        String tempFilePath = caseForJob.getTempDirectory() + File.separator + "Test.txt";
        try {
            FileUtils.writeStringToFile(new File(tempFilePath), "Test", Charset.forName("UTF-8"));
        } catch (IOException ex) {
            LOGGER.log(Level.SEVERE, "Unable to create a file in case output directory", ex);
            return "Unable to create a file in case output directory";
        }

        // 4 (NTH) Adds it as a logical file set data source.
        AutoIngestDataSource dataSource = new AutoIngestDataSource("", Paths.get(tempFilePath));
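        // Device ID is left blank here; the test file is added as a logical file set data source.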
        try {
            String error = runLogicalFilesDSP(caseForJob, dataSource);
            if (!error.isEmpty()) {
                LOGGER.log(Level.SEVERE, error);
                return error;
            }
        } catch (InterruptedException ex) {
            LOGGER.log(Level.SEVERE, "Unable to add test file as data source to case", ex);
            return "Unable to add test file as data source to case";
        }

        // 5 (NTH) Runs ingest on that data source and reports errors if the modules could not start (a sketch follows this hunk).

        //} catch (Throwable ex) {
        } finally {
            // 6 (MH) Close and delete the case.
            try {
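Step 5 is still a placeholder in this commit. A minimal sketch of how ingest might later be started on the test data source, assuming Autopsy's IngestJobSettings, IngestManager.beginIngestJob, and IngestJobStartResult APIs (those imports are not added by this commit, and the helper name and execution context string are illustrative assumptions):

    // Illustrative sketch (not part of this commit): run ingest on the test data
    // source and report an error if any ingest module fails to start.
    private static String runIngest(AutoIngestDataSource dataSource) {
        IngestJobSettings ingestJobSettings = new IngestJobSettings("MultiUserTestTool"); // assumed execution context name
        IngestJobStartResult startResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings);
        if (!startResult.getModuleErrors().isEmpty()) {
            return "Ingest module(s) failed to start for the test data source";
        }
        return RESULT_SUCCESS;
    }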

@@ -114,4 +146,79 @@
        return caseForJob;
    }

    /**
     * Passes the data source for the current job to the Logical Files data
     * source processor, which adds it to the case database.
     *
     * @param caseForJob The case.
     * @param dataSource The data source.
     *
     * @return An error string if there was an error, or an empty string if the
     *         data source was added successfully.
     *
     * @throws InterruptedException if the thread running the job processing
     *                              task is interrupted while blocked, i.e., if
     *                              ingest is shutting down.
     */
    private static String runLogicalFilesDSP(Case caseForJob, AutoIngestDataSource dataSource) throws InterruptedException {

        AutoIngestDataSourceProcessor selectedProcessor = new LocalFilesDSProcessor();
        DataSourceProcessorProgressMonitor progressMonitor = new DoNothingDSPProgressMonitor();
        synchronized (INGEST_LOCK) {
            UUID taskId = UUID.randomUUID();
            caseForJob.notifyAddingDataSource(taskId);
            DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, INGEST_LOCK);
            caseForJob.notifyAddingDataSource(taskId);
            selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack);
            INGEST_LOCK.wait();
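            // The wait returns once AddDataSourceCallback notifies INGEST_LOCK after the DSP completes.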

            // At this point we got the content object(s) from the DSP.
            // Check whether the data source was processed successfully.
            if (dataSource.getContent().isEmpty()) {
                return "Test data source failed to produce content";
            }

            if (dataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS) {
                for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                    LOGGER.log(Level.SEVERE, "Critical error running data source processor on test data source: {0}", errorMessage);
                }
                return "Critical error running data source processor on test data source: " + dataSource.getDataSourceProcessorErrorMessages().get(0);
            }

            return "";
        }
    }

    /**
     * A data source processor progress monitor that does nothing. There is
     * currently no mechanism for showing or recording data source processor
     * progress during an ingest job.
     */
    private static class DoNothingDSPProgressMonitor implements DataSourceProcessorProgressMonitor {

        /**
         * Does nothing.
         *
         * @param indeterminate
         */
        @Override
        public void setIndeterminate(final boolean indeterminate) {
        }

        /**
         * Does nothing.
         *
         * @param progress
         */
        @Override
        public void setProgress(final int progress) {
        }

        /**
         * Does nothing.
         *
         * @param text
         */
        @Override
        public void setProgressText(final String text) {
        }
    }
}