Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-17 10:17:41 +00:00)

Merge branch 'elastic-computing' of github.com:sleuthkit/autopsy into wizard_infrastructure_5359

Commit 4db2137d55
@@ -24,7 +24,6 @@ import java.awt.event.ActionListener;
import java.io.File;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.swing.JDialog;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.SwingWorker;
@@ -62,8 +61,6 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
private static final String DISPLAY_NAME = Bundle.CTL_CaseOpenAction();
private static final String PROP_BASECASE = "LBL_BaseCase_PATH"; //NON-NLS
private static final Logger LOGGER = Logger.getLogger(CaseOpenAction.class.getName());
private static JDialog multiUserCaseWindow;
private final JFileChooser fileChooser = new JFileChooser();
private final FileFilter caseMetadataFileFilter;

/**
@@ -74,13 +71,6 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
*/
public CaseOpenAction() {
caseMetadataFileFilter = new FileNameExtensionFilter(NbBundle.getMessage(CaseOpenAction.class, "CaseOpenAction.autFilter.title", Version.getName(), CaseMetadata.getFileExtension()), CaseMetadata.getFileExtension().substring(1));
fileChooser.setDragEnabled(false);
fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
fileChooser.setMultiSelectionEnabled(false);
fileChooser.setFileFilter(caseMetadataFileFilter);
if (null != ModuleSettings.getConfigSetting(ModuleSettings.MAIN_SETTINGS, PROP_BASECASE)) {
fileChooser.setCurrentDirectory(new File(ModuleSettings.getConfigSetting("Case", PROP_BASECASE))); //NON-NLS
}
}

/**
@@ -88,7 +78,16 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
* metadata file (.aut file). Upon confirming the selection, it will attempt
* to open the case described by the file.
*/
void openCaseSelectionWindow() {
JFileChooser fileChooser = new JFileChooser();
fileChooser.setDragEnabled(false);
fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
fileChooser.setMultiSelectionEnabled(false);
fileChooser.setFileFilter(caseMetadataFileFilter);
if (null != ModuleSettings.getConfigSetting(ModuleSettings.MAIN_SETTINGS, PROP_BASECASE)) {
fileChooser.setCurrentDirectory(new File(ModuleSettings.getConfigSetting("Case", PROP_BASECASE))); //NON-NLS
}

String optionsDlgTitle = NbBundle.getMessage(Case.class, "CloseCaseWhileIngesting.Warning.title");
String optionsDlgMessage = NbBundle.getMessage(Case.class, "CloseCaseWhileIngesting.Warning");
if (IngestRunningCheck.checkAndConfirmProceed(optionsDlgTitle, optionsDlgMessage)) {
@@ -106,9 +105,7 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
/*
* Close the Open Multi-User Case window, if it is open.
*/
if (multiUserCaseWindow != null) {
multiUserCaseWindow.setVisible(false);
}
OpenMultiUserCaseDialog.getInstance().setVisible(false);

/*
* Try to open the case associated with the case metadata file
@@ -160,9 +157,7 @@ public final class CaseOpenAction extends CallableSystemAction implements Action
if (UserPreferences.getIsMultiUserModeEnabled()) {
WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));

if (multiUserCaseWindow == null) {
multiUserCaseWindow = OpenMultiUserCaseDialog.getInstance();
}
OpenMultiUserCaseDialog multiUserCaseWindow = OpenMultiUserCaseDialog.getInstance();
multiUserCaseWindow.setLocationRelativeTo(WindowManager.getDefault().getMainWindow());
multiUserCaseWindow.setVisible(true);
@@ -42,6 +42,7 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.coreutils.FileTypeUtils.FileTypeCategory;
import org.sleuthkit.autopsy.report.caseuco.CaseUcoFormatExporter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
@@ -355,6 +356,19 @@ class PortableCaseReportModule implements ReportModule {
return;
}

File reportsFolder = Paths.get(caseFolder.toString(), "Reports").toFile();
if(!reportsFolder.mkdir()) {
handleError("Could not make report folder", "Could not make report folder", null, progressPanel); // NON-NLS
return;
}

try {
CaseUcoFormatExporter.export(tagNames, setNames, reportsFolder, progressPanel);
} catch (IOException | SQLException | NoCurrentCaseException | TskCoreException ex) {
handleError("Problem while generating CASE-UCO report",
"Problem while generating CASE-UCO report", ex, progressPanel); // NON-NLS
}

// Compress the case (if desired)
if (options.shouldCompress()) {
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_compressingCase());
@@ -1,3 +1,6 @@
CaseUcoFormatExporter.datasourceMsg=Generating CASE-UCO Report for %s
CaseUcoFormatExporter.finishMsg=Finished generating CASE-UCO Report
CaseUcoFormatExporter.startMsg=Generating CASE-UCO Report
OpenIDE-Module-Name=CaseUcoModule
ReportCaseUco.getName.text=CASE-UCO
ReportCaseUco.getDesc.text=CASE-UCO format report with basic property fields for every file.
@@ -23,24 +23,39 @@ import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultIndenter;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.google.common.collect.Lists;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
import java.util.logging.Level;
import org.apache.commons.io.FileUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.report.ReportProgressPanel;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TagName;

/**
* Generates CASE-UCO report file for a data source
@@ -49,6 +64,11 @@ public final class CaseUcoFormatExporter {

private static final Logger logger = Logger.getLogger(CaseUcoFormatExporter.class.getName());

private static final BlackboardAttribute.Type SET_NAME = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME);
private static final BlackboardArtifact.ARTIFACT_TYPE INTERESTING_FILE_HIT = BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
private static final BlackboardArtifact.ARTIFACT_TYPE INTERESTING_ARTIFACT_HIT = BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
private static final String TEMP_DIR_NAME = "case_uco_tmp";

private CaseUcoFormatExporter() {
}

@@ -177,6 +197,136 @@ public final class CaseUcoFormatExporter {
}
}

/**
* Exports files that are tagged w/ the following TagNames and that belong to
* the following interesting file sets (set name attributes of TSK_INTERSTING_FILE_HIT
* and TSK_INTERESTING_ARTIFACT_HIT). Artifacts that are tagged with
* the following TagNames also have their associated source files included.
*
* Duplicate files are excluded.
*
* @param tagTypes Collection of TagNames to match
* @param interestingItemSets Collection of SET_NAMEs to match on in TSK_INTERESTING_FILE_HITs
* and TSK_INTERESTING_ARTIFACT_HITs.
* @param outputFilePath Path to the folder that the CASE-UCO report should be written into
* @param progressPanel UI Component to be updated with current processing status
*/
@NbBundle.Messages({
"CaseUcoFormatExporter.startMsg=Generating CASE-UCO Report",
"CaseUcoFormatExporter.datasourceMsg=Generating CASE-UCO Report for %s",
"CaseUcoFormatExporter.finishMsg=Finished generating CASE-UCO Report"
})
public static void export(List<TagName> tagTypes, List<String> interestingItemSets,
File caseReportFolder, ReportProgressPanel progressPanel) throws IOException, SQLException,
NoCurrentCaseException, TskCoreException {

progressPanel.updateStatusLabel(Bundle.CaseUcoFormatExporter_startMsg());
//Acquire references for file discovery
Case currentCase = Case.getCurrentCaseThrows();
String caseTempDirectory = currentCase.getTempDirectory();
SleuthkitCase skCase = currentCase.getSleuthkitCase();
TagsManager tagsManager = currentCase.getServices().getTagsManager();

//Create temp directory to filter out duplicate files.
Path tmpDir = Paths.get(caseTempDirectory, TEMP_DIR_NAME);
FileUtils.deleteDirectory(tmpDir.toFile());
Files.createDirectory(tmpDir);

//Create our report file
Path reportFile = Paths.get(caseReportFolder.toString(),
ReportCaseUco.getReportFileName());

//Timezone for formatting file creation, modification, and accessed times
SimpleTimeZone timeZone = new SimpleTimeZone(0, "GMT");

try (JsonGenerator jsonGenerator = createJsonGenerator(reportFile.toFile())) {
initializeJsonOutputFile(jsonGenerator);
//Make the case the first entity in the report file.
String caseTraceId = saveCaseInfo(skCase, jsonGenerator);

for (DataSource ds : skCase.getDataSources()) {
progressPanel.updateStatusLabel(String.format(
Bundle.CaseUcoFormatExporter_datasourceMsg(), ds.getName()));
String dataSourceTraceId = saveDataSourceInfo(ds.getId(),
caseTraceId, skCase, jsonGenerator);
for (TagName tn : tagTypes) {
for (ContentTag ct : tagsManager.getContentTagsByTagName(tn, ds.getId())) {
saveUniqueFilesToCaseUcoFormat(ct.getContent(), tmpDir,
jsonGenerator, timeZone, dataSourceTraceId);
}
for (BlackboardArtifactTag bat : tagsManager.getBlackboardArtifactTagsByTagName(tn, ds.getId())) {
saveUniqueFilesToCaseUcoFormat(bat.getContent(), tmpDir,
jsonGenerator, timeZone, dataSourceTraceId);
}
}
if(!interestingItemSets.isEmpty()) {
List<BlackboardArtifact.ARTIFACT_TYPE> typesToQuery = Lists.newArrayList(
INTERESTING_FILE_HIT, INTERESTING_ARTIFACT_HIT);
for(BlackboardArtifact.ARTIFACT_TYPE artType : typesToQuery) {
for(BlackboardArtifact bArt : skCase.getBlackboardArtifacts(artType)) {
if(bArt.getDataSource().getId() != ds.getId()) {
continue;
}
BlackboardAttribute setAttr = bArt.getAttribute(SET_NAME);
if (interestingItemSets.contains(setAttr.getValueString())) {
Content content = skCase.getContentById(bArt.getObjectID());
saveUniqueFilesToCaseUcoFormat(content, tmpDir,
jsonGenerator, timeZone, dataSourceTraceId);
}
}
}
}
}
finilizeJsonOutputFile(jsonGenerator);
progressPanel.updateStatusLabel(Bundle.CaseUcoFormatExporter_finishMsg());
}
}

/**
* Saves only unique abstract files to the report. Uniqueness is
* determined by object id. The tmpDir Path is used to stored object
* ids that have already been visited.
*
* @param content Abstractfile isntance
* @param tmpDir Directory to write object ids
* @param jsonGenerator Report generator
* @param timeZone Time zore for ctime, atime, and mtime formatting
* @param dataSourceTraceId TraceID number for the parent data source
* @throws IOException
*/
private static void saveUniqueFilesToCaseUcoFormat(Content content, Path tmpDir, JsonGenerator jsonGenerator,
TimeZone timeZone, String dataSourceTraceId) throws IOException {
if (content instanceof AbstractFile && !(content instanceof DataSource)) {
AbstractFile absFile = (AbstractFile) content;
Path filePath = tmpDir.resolve(Long.toString(absFile.getId()));
if (!Files.exists(filePath) && !absFile.isDir()) {
saveFileInCaseUcoFormat(
absFile.getId(),
absFile.getName(),
absFile.getParentPath(),
absFile.getMd5Hash(),
absFile.getMIMEType(),
absFile.getSize(),
ContentUtils.getStringTimeISO8601(absFile.getCtime(), timeZone),
ContentUtils.getStringTimeISO8601(absFile.getAtime(), timeZone),
ContentUtils.getStringTimeISO8601(absFile.getMtime(), timeZone),
absFile.getNameExtension(),
jsonGenerator,
dataSourceTraceId
);
filePath.toFile().createNewFile();
}
}
}

private static JsonGenerator createJsonGenerator(File reportFile) throws IOException {
JsonFactory jsonGeneratorFactory = new JsonFactory();
JsonGenerator jsonGenerator = jsonGeneratorFactory.createGenerator(reportFile, JsonEncoding.UTF8);
// instert \n after each field for more readable formatting
jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter().withObjectIndenter(new DefaultIndenter(" ", "\n")));
return jsonGenerator;
}

private static void initializeJsonOutputFile(JsonGenerator catalog) throws IOException {
catalog.writeStartObject();
catalog.writeFieldName("@graph");
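
The de-duplication in saveUniqueFilesToCaseUcoFormat() above works by dropping an empty marker file, named after the object id, into a scratch directory under the case temp folder. A minimal, self-contained sketch of that pattern, with illustrative class and method names that are not part of this patch:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    // Sketch only: remembers which object ids have already been exported by
    // creating one empty marker file per id inside a scratch directory.
    final class SeenObjectIds {
        private final Path markerDir;

        SeenObjectIds(Path markerDir) {
            this.markerDir = markerDir;
        }

        // Returns true the first time an id is seen, false on repeats.
        boolean markIfUnseen(long objectId) throws IOException {
            Path marker = markerDir.resolve(Long.toString(objectId));
            if (Files.exists(marker)) {
                return false;             // already exported; caller skips it
            }
            Files.createFile(marker);     // record the id for later checks
            return true;
        }
    }

Using the file system as the "seen" set keeps memory use flat for large tag sets, at the cost of one small file per exported object; the export() method above deletes and recreates the scratch directory on each run.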
@@ -561,10 +561,13 @@
leading wildcard queries. -->
<!--<field name="text_rev" type="text_general_rev" indexed="true" stored="false" multiValued="true"/>-->

<!-- field with white-space tokenized words for TermsComponent regex search (useful for fast search of IP addresses, URLs, certain phone numbers)
also be useful for Lucene based queries containing special characters-->
<!-- populated via copyField -->
<!-- field with white-space tokenized words for TermsComponent regex search.
No longer being populated by Autopsy, but remains for backward compatability.
content_str is used instead to better deal with regex that have white space. -->
<field name="content_ws" type="text_ws" indexed="true" stored="false" multiValued="true" />

<!-- field that contains the raw string form of the chunk. Used for regular expression
matching. Populated by Autopsy using lower-case text and a copyField for file name. -->
<field name="content_str" type="string" indexed="true" stored="true" multiValued="true" />
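
The comment above notes that content_str, not content_ws, now backs regular-expression searches and that Autopsy populates it with lower-cased text. As an illustration only, a regex term query against that field (Lucene field:/pattern/ syntax) could be issued through SolrJ roughly as follows; the Solr URL, core name, and pattern are placeholders, not values taken from this change:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class ContentStrRegexQuery {
        public static void main(String[] args) throws Exception {
            // Placeholder base URL and core name; Autopsy manages its own Solr connection.
            try (HttpSolrClient solr = new HttpSolrClient.Builder("http://localhost:8983/solr/text_index").build()) {
                // Lucene regex syntax against the raw-string field; keep the pattern
                // lower-case because the field holds lower-cased text.
                SolrQuery query = new SolrQuery("content_str:/.*192\\.168\\.[0-9]+\\.[0-9]+.*/");
                QueryResponse response = solr.query(query);
                System.out.println("hits: " + response.getResults().getNumFound());
            }
        }
    }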

<!-- Uncommenting the following will create a "timestamp" field using
@@ -101,25 +101,29 @@ public class Server {
return "image_id"; //NON-NLS
}
},
// This is not stored or index . it is copied to Text and Content_Ws
// This is not stored or indexed. it is copied to text by the schema
CONTENT {
@Override
public String toString() {
return "content"; //NON-NLS
}
},
// String representation for regular expression searching
CONTENT_STR {
@Override
public String toString() {
return "content_str"; //NON-NLS
}
},
// default search field. Populated by schema
TEXT {
@Override
public String toString() {
return "text"; //NON-NLS
}
},
// no longer populated. Was used for regular expression searching.
// Should not be used.
CONTENT_WS {
@Override
public String toString() {
@@ -132,28 +136,28 @@ public class Server {
return "file_name"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
CTIME {
@Override
public String toString() {
return "ctime"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
ATIME {
@Override
public String toString() {
return "atime"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
MTIME {
@Override
public String toString() {
return "mtime"; //NON-NLS
}
},
// note that we no longer index this field
// note that we no longer store or index this field
CRTIME {
@Override
public String toString() {
@@ -437,7 +437,26 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
ig_groups_seen_index = line.find('INSERT INTO "image_gallery_groups_seen"') > -1 or line.find('INSERT INTO image_gallery_groups_seen ') > -1

parens = line[line.find('(') + 1 : line.rfind(')')]
fields_list = list(csv.reader([parens.replace(" ", "")], quotechar="'"))[0]
no_space_parens = parens.replace(" ", "")
fields_list = list(csv.reader([no_space_parens], quotechar="'"))[0]
#Add back in the quotechar for values that were originally wrapped (csv reader consumes this character)
fields_list_with_quotes = []
ptr = 0
for field in fields_list:
if(len(field) == 0):
field = "'" + field + "'"
else:
start = no_space_parens.find(field, ptr)
if((start - 1) >= 0 and no_space_parens[start - 1] == '\''):
if((start + len(field)) < len(no_space_parens) and no_space_parens[start + len(field)] == '\''):
field = "'" + field + "'"
fields_list_with_quotes.append(field)
if(ptr > 0):
#Add one for each comma that is used to separate values in the original string
ptr+=1
ptr += len(field)

fields_list = fields_list_with_quotes

# remove object ID
if files_index: