diff --git a/Core/build.xml b/Core/build.xml index 32fff13e6e..bbf612c3d9 100644 --- a/Core/build.xml +++ b/Core/build.xml @@ -48,6 +48,11 @@ + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TopProgramsSummary.java b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TopProgramsSummary.java index 490f1217bc..1fb73c0bb7 100644 --- a/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TopProgramsSummary.java +++ b/Core/src/org/sleuthkit/autopsy/datasourcesummary/datamodel/TopProgramsSummary.java @@ -285,6 +285,7 @@ public class TopProgramsSummary implements DefaultArtifactUpdateGovernor { * Determines a short folder name if any. Otherwise, returns empty string. * * @param strPath The string path. + * @param applicationName The application name. * * @return The short folder name or empty string if not found. */ diff --git a/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/Bundle.properties new file mode 100644 index 0000000000..0036d4dd6f --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/Bundle.properties @@ -0,0 +1,3 @@ +ILeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}. +ILeappAnalyzerIngestModule.processing.file=Processing file {0} +ILeappAnalyzerIngestModule.parsing.file=Parsing file {0} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/Bundle.properties-MERGED new file mode 100644 index 0000000000..8638a2c121 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/Bundle.properties-MERGED @@ -0,0 +1,29 @@ +ILeappAnalyzerIngestModule.completed=iLeapp Processing Completed +ILeappAnalyzerIngestModule.error.creating.output.dir=Error creating iLeapp module output directory. 
+ILeappAnalyzerIngestModule.error.ileapp.file.processor.init=Failure to initialize ILeappProcessFile +ILeappAnalyzerIngestModule.error.running.iLeapp=Error running iLeapp, see log file. +ILeappAnalyzerIngestModule.executable.not.found=iLeapp Executable Not Found. +ILeappAnalyzerIngestModule.has.run=iLeapp +ILeappAnalyzerIngestModule.iLeapp.cancelled=iLeapp run was canceled +ILeappAnalyzerIngestModule.init.exception.msg=Unable to find {0}. +ILeappAnalyzerIngestModule.processing.file=Processing file {0} +ILeappAnalyzerIngestModule.parsing.file=Parsing file {0} +ILeappAnalyzerIngestModule.report.name=iLeapp Html Report +ILeappAnalyzerIngestModule.requires.windows=iLeapp module requires windows. +ILeappAnalyzerIngestModule.running.iLeapp=Running iLeapp +ILeappAnalyzerIngestModule.starting.iLeapp=Starting iLeapp +ILeappAnalyzerModuleFactory_moduleDesc=Runs iLeapp against files. +ILeappAnalyzerModuleFactory_moduleName=ILeapp Analyzer +ILeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file. +ILeappFileProcessor.cannotBuildXmlParser=Cannot build an XML parser. +ILeappFileProcessor.completed=iLeapp Processing Completed +ILeappFileProcessor.error.creating.new.artifacts=Error creating new artifacts. +ILeappFileProcessor.error.creating.output.dir=Error creating iLeapp module output directory. +ILeappFileProcessor.error.reading.iLeapp.directory=Error reading iLeapp Output Directory +ILeappFileProcessor.error.running.iLeapp=Error running iLeapp, see log file. +ILeappFileProcessor.has.run=iLeapp +ILeappFileProcessor.iLeapp.cancelled=iLeapp run was canceled +ILeappFileProcessor.postartifacts_error=Error posting Blackboard Artifact +ILeappFileProcessor.running.iLeapp=Running iLeapp +ILeappFileProcessor.starting.iLeapp=Starting iLeapp +ILeappFileProcessor_cannotParseXml=Cannot Parse XML file. 
diff --git a/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappAnalyzerIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappAnalyzerIngestModule.java new file mode 100644 index 0000000000..a35dc5a2c0 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappAnalyzerIngestModule.java @@ -0,0 +1,259 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.modules.ileappanalyzer; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.text.SimpleDateFormat; +import java.util.List; +import java.util.ArrayList; +import java.util.Locale; +import java.util.logging.Level; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.openide.modules.InstalledFileLocator; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import static org.sleuthkit.autopsy.casemodule.Case.getCurrentCase; +import org.sleuthkit.autopsy.casemodule.services.FileManager; +import org.sleuthkit.autopsy.coreutils.ExecUtil; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; +import org.sleuthkit.autopsy.ingest.IngestJobContext; +import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Data source ingest module that runs iLeapp against logical iOS files. 
+ */ +public class ILeappAnalyzerIngestModule implements DataSourceIngestModule { + + private static final Logger logger = Logger.getLogger(ILeappAnalyzerIngestModule.class.getName()); + private static final String MODULE_NAME = ILeappAnalyzerModuleFactory.getModuleName(); + + private static final String ILEAPP = "iLeapp"; //NON-NLS + private static final String ILEAPP_EXECUTABLE = "ileapp.exe";//NON-NLS + + private File iLeappExecutable; + + private IngestJobContext context; + + private ILeappFileProcessor iLeappFileProcessor; + + ILeappAnalyzerIngestModule() { + // This constructor is intentionally empty. Nothing special is needed here. + } + + @NbBundle.Messages({ + "ILeappAnalyzerIngestModule.executable.not.found=iLeapp Executable Not Found.", + "ILeappAnalyzerIngestModule.requires.windows=iLeapp module requires windows.", + "ILeappAnalyzerIngestModule.error.ileapp.file.processor.init=Failure to initialize ILeappProcessFile"}) + @Override + public void startUp(IngestJobContext context) throws IngestModuleException { + this.context = context; + + if (false == PlatformUtil.isWindowsOS()) { + throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_requires_windows()); + } + + try { + iLeappFileProcessor = new ILeappFileProcessor(); + } catch (IOException | IngestModuleException ex) { + throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex); + } + + try { + iLeappExecutable = locateExecutable(ILEAPP_EXECUTABLE); + } catch (FileNotFoundException exception) { + logger.log(Level.WARNING, "iLeapp executable not found.", exception); //NON-NLS + throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_executable_not_found(), exception); + } + + } + + @NbBundle.Messages({ + "ILeappAnalyzerIngestModule.error.running.iLeapp=Error running iLeapp, see log file.", + "ILeappAnalyzerIngestModule.error.creating.output.dir=Error creating iLeapp module output directory.", + 
"ILeappAnalyzerIngestModule.starting.iLeapp=Starting iLeapp", + "ILeappAnalyzerIngestModule.running.iLeapp=Running iLeapp", + "ILeappAnalyzerIngestModule.has.run=iLeapp", + "ILeappAnalyzerIngestModule.iLeapp.cancelled=iLeapp run was canceled", + "ILeappAnalyzerIngestModule.completed=iLeapp Processing Completed", + "ILeappAnalyzerIngestModule.report.name=iLeapp Html Report"}) + @Override + public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) { + + statusHelper.progress(Bundle.ILeappAnalyzerIngestModule_starting_iLeapp(), 0); + + List iLeappFilesToProcess = findiLeappFilesToProcess(dataSource); + + statusHelper.switchToDeterminate(iLeappFilesToProcess.size()); + + Integer filesProcessedCount = 0; + + Case currentCase = Case.getCurrentCase(); + for (AbstractFile iLeappFile : iLeappFilesToProcess) { + + String currentTime = new SimpleDateFormat("yyyy-MM-dd HH-mm-ss z", Locale.US).format(System.currentTimeMillis());//NON-NLS + Path moduleOutputPath = Paths.get(currentCase.getModuleDirectory(), ILEAPP, currentTime); + try { + Files.createDirectories(moduleOutputPath); + } catch (IOException ex) { + logger.log(Level.SEVERE, String.format("Error creating iLeapp output directory %s", moduleOutputPath.toString()), ex); + return ProcessResult.ERROR; + } + + statusHelper.progress(NbBundle.getMessage(this.getClass(), "ILeappAnalyzerIngestModule.processing.file", iLeappFile.getName()), filesProcessedCount); + ProcessBuilder iLeappCommand = buildiLeappCommand(moduleOutputPath, iLeappFile.getLocalAbsPath(), iLeappFile.getNameExtension()); + try { + int result = ExecUtil.execute(iLeappCommand, new DataSourceIngestModuleProcessTerminator(context)); + if (result != 0) { + logger.log(Level.SEVERE, String.format("Error running iLeapp, error code returned %d", result)); //NON-NLS + return ProcessResult.ERROR; + } + + addILeappReportToReports(moduleOutputPath, currentCase); + + } catch (IOException ex) { + logger.log(Level.SEVERE, 
String.format("Error when trying to execute iLeapp program against file %s", iLeappFile.getLocalAbsPath()), ex); + return ProcessResult.ERROR; + } + + if (context.dataSourceIngestIsCancelled()) { + logger.log(Level.INFO, "ILeapp Analyser ingest module run was canceled"); //NON-NLS + return ProcessResult.OK; + } + + ProcessResult fileProcessorResult = iLeappFileProcessor.processFiles(dataSource, moduleOutputPath, iLeappFile); + + if (fileProcessorResult == ProcessResult.ERROR) { + return ProcessResult.ERROR; + } + + filesProcessedCount++; + } + + IngestMessage message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, + Bundle.ILeappAnalyzerIngestModule_has_run(), + Bundle.ILeappAnalyzerIngestModule_completed()); + IngestServices.getInstance().postMessage(message); + return ProcessResult.OK; + } + + /** + * Find the files that will be processed by the iLeapp program + * + * @param dataSource + * + * @return List of abstract files to process. + */ + private List findiLeappFilesToProcess(Content dataSource) { + + List iLeappFiles = new ArrayList<>(); + + FileManager fileManager = getCurrentCase().getServices().getFileManager(); + + // findFiles use the SQL wildcard % in the file name + try { + iLeappFiles = fileManager.findFiles(dataSource, "%", "/"); //NON-NLS + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "No files found to process"); //NON-NLS + return iLeappFiles; + } + + List iLeappFilesToProcess = new ArrayList<>(); + for (AbstractFile iLeappFile : iLeappFiles) { + if ((iLeappFile.getName().toLowerCase().contains(".zip") || (iLeappFile.getName().toLowerCase().contains(".tar")) + || iLeappFile.getName().toLowerCase().contains(".tgz"))) { + iLeappFilesToProcess.add(iLeappFile); + } + } + + return iLeappFilesToProcess; + } + + private ProcessBuilder buildiLeappCommand(Path moduleOutputPath, String sourceFilePath, String iLeappFileSystemType) { + + ProcessBuilder processBuilder = buildProcessWithRunAsInvoker( + "\"" + iLeappExecutable + 
"\"", //NON-NLS + "-t", iLeappFileSystemType, //NON-NLS + "-i", sourceFilePath, //NON-NLS + "-o", moduleOutputPath.toString() + ); + processBuilder.redirectError(moduleOutputPath.resolve("iLeapp_err.txt").toFile()); //NON-NLS + processBuilder.redirectOutput(moduleOutputPath.resolve("iLeapp_out.txt").toFile()); //NON-NLS + return processBuilder; + } + + static private ProcessBuilder buildProcessWithRunAsInvoker(String... commandLine) { + ProcessBuilder processBuilder = new ProcessBuilder(commandLine); + /* + * Add an environment variable to force iLeapp to run with + * the same permissions Autopsy uses. + */ + processBuilder.environment().put("__COMPAT_LAYER", "RunAsInvoker"); //NON-NLS + return processBuilder; + } + + private static File locateExecutable(String executableName) throws FileNotFoundException { + String executableToFindName = Paths.get(ILEAPP, executableName).toString(); + + File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, ILeappAnalyzerIngestModule.class.getPackage().getName(), false); + if (null == exeFile || exeFile.canExecute() == false) { + throw new FileNotFoundException(executableName + " executable not found."); + } + return exeFile; + } + + /** + * Find the index.html file in the iLeapp output directory so it can be + * added to reports + */ + private void addILeappReportToReports(Path iLeappOutputDir, Case currentCase) { + List allIndexFiles = new ArrayList<>(); + + try (Stream walk = Files.walk(iLeappOutputDir)) { + + allIndexFiles = walk.map(x -> x.toString()) + .filter(f -> f.toLowerCase().endsWith("index.html")).collect(Collectors.toList()); + + if (!allIndexFiles.isEmpty()) { + currentCase.addReport(allIndexFiles.get(0), MODULE_NAME, Bundle.ILeappAnalyzerIngestModule_report_name()); + } + + } catch (IOException | UncheckedIOException | TskCoreException ex) { + // catch the error and continue on as report is not added + logger.log(Level.WARNING, String.format("Error finding index file in path %s", 
iLeappOutputDir.toString()), ex); + } + + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappAnalyzerModuleFactory.java b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappAnalyzerModuleFactory.java new file mode 100644 index 0000000000..5da165392f --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappAnalyzerModuleFactory.java @@ -0,0 +1,67 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.modules.ileappanalyzer; + +import org.openide.util.NbBundle; +import org.openide.util.lookup.ServiceProvider; +import org.sleuthkit.autopsy.coreutils.Version; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; +import org.sleuthkit.autopsy.ingest.IngestModuleFactory; +import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter; +import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings; + +/** + * A factory that creates data source ingest modules that will run iLeapp + * against logical files and saves the output to module output. 
+ */ +@ServiceProvider(service = IngestModuleFactory.class) +public class ILeappAnalyzerModuleFactory extends IngestModuleFactoryAdapter { + + @NbBundle.Messages({"ILeappAnalyzerModuleFactory_moduleName=ILeapp Analyzer"}) + static String getModuleName() { + return Bundle.ILeappAnalyzerModuleFactory_moduleName(); + } + + @Override + public String getModuleDisplayName() { + return getModuleName(); + } + + @NbBundle.Messages({"ILeappAnalyzerModuleFactory_moduleDesc=Runs iLeapp against files."}) + @Override + public String getModuleDescription() { + return Bundle.ILeappAnalyzerModuleFactory_moduleDesc(); + } + + @Override + public String getModuleVersionNumber() { + return Version.getVersion(); + } + + @Override + public boolean isDataSourceIngestModuleFactory() { + return true; + } + + @Override + public DataSourceIngestModule createDataSourceIngestModule(IngestModuleIngestJobSettings ingestJobOptions) { + return new ILeappAnalyzerIngestModule(); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappFileProcessor.java b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappFileProcessor.java new file mode 100644 index 0000000000..1bc80c9019 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ILeappFileProcessor.java @@ -0,0 +1,446 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.modules.ileappanalyzer; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.List; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import static java.util.Locale.US; +import java.util.Map; +import java.util.logging.Level; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import org.apache.commons.io.FilenameUtils; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; +import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Blackboard; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskException; +import org.w3c.dom.Document; +import org.w3c.dom.NamedNodeMap; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +/** + * Find and process output from iLeapp program and bring into Autopsy + */ +public final class ILeappFileProcessor { + + private static final Logger logger = Logger.getLogger(ILeappFileProcessor.class.getName()); + private static final String MODULE_NAME = 
ILeappAnalyzerModuleFactory.getModuleName(); + + private static final String XMLFILE = "ileap-artifact-attribute-reference.xml"; //NON-NLS + + private final Map tsvFiles; + private final Map tsvFileArtifacts; + private final Map tsvFileArtifactComments; + private final Map>> tsvFileAttributes; + + public ILeappFileProcessor() throws IOException, IngestModuleException { + this.tsvFiles = new HashMap<>(); + this.tsvFileArtifacts = new HashMap<>(); + this.tsvFileArtifactComments = new HashMap<>(); + this.tsvFileAttributes = new HashMap<>(); + + configExtractor(); + loadConfigFile(); + + } + + @NbBundle.Messages({ + "ILeappFileProcessor.error.running.iLeapp=Error running iLeapp, see log file.", + "ILeappFileProcessor.error.creating.output.dir=Error creating iLeapp module output directory.", + "ILeappFileProcessor.starting.iLeapp=Starting iLeapp", + "ILeappFileProcessor.running.iLeapp=Running iLeapp", + "ILeappFileProcessor.has.run=iLeapp", + "ILeappFileProcessor.iLeapp.cancelled=iLeapp run was canceled", + "ILeappFileProcessor.completed=iLeapp Processing Completed", + "ILeappFileProcessor.error.reading.iLeapp.directory=Error reading iLeapp Output Directory"}) + + public ProcessResult processFiles(Content dataSource, Path moduleOutputPath, AbstractFile iLeappFile) { + + try { + List iLeappTsvOutputFiles = findTsvFiles(moduleOutputPath); + processiLeappFiles(iLeappTsvOutputFiles, iLeappFile); + } catch (IOException | IngestModuleException ex) { + logger.log(Level.SEVERE, String.format("Error trying to process iLeapp output files in directory %s. ", moduleOutputPath.toString()), ex); //NON-NLS + return ProcessResult.ERROR; + } + + return ProcessResult.OK; + } + + /** + * Find the tsv files in the iLeapp output directory and match them to files + * we know we want to process and return the list to process those files. 
+ */ + private List findTsvFiles(Path iLeappOutputDir) throws IngestModuleException { + List allTsvFiles = new ArrayList<>(); + List foundTsvFiles = new ArrayList<>(); + + try (Stream walk = Files.walk(iLeappOutputDir)) { + + allTsvFiles = walk.map(x -> x.toString()) + .filter(f -> f.toLowerCase().endsWith(".tsv")).collect(Collectors.toList()); + + for (String tsvFile : allTsvFiles) { + if (tsvFiles.containsKey(FilenameUtils.getName(tsvFile))) { + foundTsvFiles.add(tsvFile); + } + } + + } catch (IOException | UncheckedIOException e) { + throw new IngestModuleException(Bundle.ILeappFileProcessor_error_reading_iLeapp_directory() + iLeappOutputDir.toString(), e); + } + + return foundTsvFiles; + + } + + /** + * Process the iLeapp files that were found that match the xml mapping file + * + * @param iLeappFilesToProcess List of files to process + * @param iLeappImageFile Abstract file to create artifact for + * @param statusHelper progress bar update + * + * @throws FileNotFoundException + * @throws IOException + */ + private void processiLeappFiles(List iLeappFilesToProcess, AbstractFile iLeappImageFile) throws FileNotFoundException, IOException, IngestModuleException { + List bbartifacts = new ArrayList<>(); + + for (String iLeappFileName : iLeappFilesToProcess) { + String fileName = FilenameUtils.getName(iLeappFileName); + File iLeappFile = new File(iLeappFileName); + if (tsvFileAttributes.containsKey(fileName)) { + List> attrList = tsvFileAttributes.get(fileName); + try { + BlackboardArtifact.Type artifactType = Case.getCurrentCase().getSleuthkitCase().getArtifactType(tsvFileArtifacts.get(fileName)); + + processFile(iLeappFile, attrList, fileName, artifactType, bbartifacts, iLeappImageFile); + + } catch (TskCoreException ex) { + // check this + throw new IngestModuleException(String.format("Error getting Blackboard Artifact Type for %s", tsvFileArtifacts.get(fileName)), ex); + } + } + + } + + if (!bbartifacts.isEmpty()) { + postArtifacts(bbartifacts); + } + + } + + 
private void processFile(File iLeappFile, List> attrList, String fileName, BlackboardArtifact.Type artifactType, + List bbartifacts, AbstractFile iLeappImageFile) throws FileNotFoundException, IOException, IngestModuleException { + try (BufferedReader reader = new BufferedReader(new FileReader(iLeappFile))) { + String line = reader.readLine(); + // Check first line, if it is null then no heading so nothing to match to, close and go to next file. + if (line != null) { + Map columnNumberToProcess = findColumnsToProcess(line, attrList); + line = reader.readLine(); + while (line != null) { + Collection bbattributes = processReadLine(line, columnNumberToProcess, fileName); + if (!bbattributes.isEmpty()) { + BlackboardArtifact bbartifact = createArtifactWithAttributes(artifactType.getTypeID(), iLeappImageFile, bbattributes); + if (bbartifact != null) { + bbartifacts.add(bbartifact); + } + } + line = reader.readLine(); + } + } + } + + } + + /** + * Process the line read and create the necessary attributes for it + * + * @param line a tsv line to process that was read + * @param columnNumberToProcess Which columns to process in the tsv line + * + * @return + */ + private Collection processReadLine(String line, Map columnNumberToProcess, String fileName) throws IngestModuleException { + String[] columnValues = line.split("\\t"); + + Collection bbattributes = new ArrayList(); + + for (Map.Entry columnToProcess : columnNumberToProcess.entrySet()) { + Integer columnNumber = columnToProcess.getKey(); + String attributeName = columnToProcess.getValue(); + + try { + BlackboardAttribute.Type attributeType = Case.getCurrentCase().getSleuthkitCase().getAttributeType(attributeName.toUpperCase()); + if (attributeType == null) { + break; + } + String attrType = attributeType.getValueType().getLabel().toUpperCase(); + checkAttributeType(bbattributes, attrType, columnValues, columnNumber, attributeType); + } catch (TskCoreException ex) { + throw new 
IngestModuleException(String.format("Error getting Attribute type for Attribute Name %s", attributeName), ex); //NON-NLS + } + } + + if (tsvFileArtifactComments.containsKey(fileName)) { + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, tsvFileArtifactComments.get(fileName))); + } + + return bbattributes; + + } + + private void checkAttributeType(Collection bbattributes, String attrType, String[] columnValues, Integer columnNumber, BlackboardAttribute.Type attributeType) { + if (attrType.matches("STRING")) { + bbattributes.add(new BlackboardAttribute(attributeType, MODULE_NAME, columnValues[columnNumber])); + } else if (attrType.matches("INTEGER")) { + bbattributes.add(new BlackboardAttribute(attributeType, MODULE_NAME, Integer.valueOf(columnValues[columnNumber]))); + } else if (attrType.matches("LONG")) { + bbattributes.add(new BlackboardAttribute(attributeType, MODULE_NAME, Long.valueOf(columnValues[columnNumber]))); + } else if (attrType.matches("DOUBLE")) { + bbattributes.add(new BlackboardAttribute(attributeType, MODULE_NAME, Double.valueOf(columnValues[columnNumber]))); + } else if (attrType.matches("BYTE")) { + bbattributes.add(new BlackboardAttribute(attributeType, MODULE_NAME, Byte.valueOf(columnValues[columnNumber]))); + } else if (attrType.matches("DATETIME")) { + // format of data should be the same in all the data and the format is 2020-03-28 01:00:17 + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-d HH:mm:ss", US); + Long dateLong = Long.valueOf(0); + try { + Date newDate = dateFormat.parse(columnValues[columnNumber]); + dateLong = newDate.getTime() / 1000; + bbattributes.add(new BlackboardAttribute(attributeType, MODULE_NAME, dateLong)); + } catch (ParseException ex) { + // catching error and displaying date that could not be parsed + // we set the timestamp to 0 and continue on processing + logger.log(Level.WARNING, String.format("Failed to parse date/time %s for attribute.", 
columnValues[columnNumber]), ex); //NON-NLS + } + } else if (attrType.matches("JSON")) { + + bbattributes.add(new BlackboardAttribute(attributeType, MODULE_NAME, columnValues[columnNumber])); + } else { + // Log this and continue on with processing + logger.log(Level.WARNING, String.format("Attribute Type %s not defined.", attrType)); //NON-NLS + } + + } + + /** + * Process the first line of the tsv file which has the headings. Match the + * headings to the columns in the XML mapping file so we know which columns + * to process. + * + * @param line a tsv heading line of the columns in the file + * @param attrList the list of headings we want to process + * + * @return the numbered column(s) and attribute(s) we want to use for the + * column(s) + */ + private Map findColumnsToProcess(String line, List> attrList) { + String[] columnNames = line.split("\\t"); + HashMap columnsToProcess = new HashMap<>(); + + Integer columnPosition = 0; + for (String columnName : columnNames) { + // for some reason the first column of the line has unprintable characters so removing them + String cleanColumnName = columnName.replaceAll("[^\\n\\r\\t\\p{Print}]", ""); + for (List atList : attrList) { + if (atList.contains(cleanColumnName.toLowerCase())) { + columnsToProcess.put(columnPosition, atList.get(0)); + break; + } + } + columnPosition++; + } + + return columnsToProcess; + } + + @NbBundle.Messages({ + "ILeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file.", + "ILeappFileProcessor.cannotBuildXmlParser=Cannot build an XML parser.", + "ILeappFileProcessor_cannotParseXml=Cannot Parse XML file.", + "ILeappFileProcessor.postartifacts_error=Error posting Blackboard Artifact", + "ILeappFileProcessor.error.creating.new.artifacts=Error creating new artifacts." 
+ }) + + /** + * Read the XML config file and load the mappings into maps + */ + private void loadConfigFile() throws IngestModuleException { + Document xmlinput; + try { + String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE; + File f = new File(path); + DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); + DocumentBuilder db = dbf.newDocumentBuilder(); + xmlinput = db.parse(f); + + } catch (IOException e) { + throw new IngestModuleException(Bundle.ILeappFileProcessor_cannot_load_artifact_xml() + e.getLocalizedMessage(), e); //NON-NLS + } catch (ParserConfigurationException pce) { + throw new IngestModuleException(Bundle.ILeappFileProcessor_cannotBuildXmlParser() + pce.getLocalizedMessage(), pce); //NON-NLS + } catch (SAXException sxe) { + throw new IngestModuleException(Bundle.ILeappFileProcessor_cannotParseXml() + sxe.getLocalizedMessage(), sxe); //NON-NLS + } + + getFileNode(xmlinput); + getArtifactNode(xmlinput); + getAttributeNodes(xmlinput); + + } + + private void getFileNode(Document xmlinput) { + + NodeList nlist = xmlinput.getElementsByTagName("FileName"); //NON-NLS + + for (int i = 0; i < nlist.getLength(); i++) { + NamedNodeMap nnm = nlist.item(i).getAttributes(); + tsvFiles.put(nnm.getNamedItem("filename").getNodeValue(), nnm.getNamedItem("description").getNodeValue()); + + } + + } + + private void getArtifactNode(Document xmlinput) { + + NodeList artifactNlist = xmlinput.getElementsByTagName("ArtifactName"); //NON-NLS + for (int k = 0; k < artifactNlist.getLength(); k++) { + NamedNodeMap nnm = artifactNlist.item(k).getAttributes(); + String artifactName = nnm.getNamedItem("artifactname").getNodeValue(); + String comment = nnm.getNamedItem("comment").getNodeValue(); + String parentName = artifactNlist.item(k).getParentNode().getAttributes().getNamedItem("filename").getNodeValue(); + + tsvFileArtifacts.put(parentName, artifactName); + + if (!comment.toLowerCase().matches("null")) { + 
tsvFileArtifactComments.put(parentName, comment); + } + } + + } + + private void getAttributeNodes(Document xmlinput) { + + NodeList attributeNlist = xmlinput.getElementsByTagName("AttributeName"); //NON-NLS + for (int k = 0; k < attributeNlist.getLength(); k++) { + List attributeList = new ArrayList<>(); + NamedNodeMap nnm = attributeNlist.item(k).getAttributes(); + String attributeName = nnm.getNamedItem("attributename").getNodeValue(); + if (!attributeName.toLowerCase().matches("null")) { + String columnName = nnm.getNamedItem("columnName").getNodeValue(); + String required = nnm.getNamedItem("required").getNodeValue(); + String parentName = attributeNlist.item(k).getParentNode().getParentNode().getAttributes().getNamedItem("filename").getNodeValue(); + + attributeList.add(attributeName.toLowerCase()); + attributeList.add(columnName.toLowerCase()); + attributeList.add(required.toLowerCase()); + + if (tsvFileAttributes.containsKey(parentName)) { + List> attrList = tsvFileAttributes.get(parentName); + attrList.add(attributeList); + tsvFileAttributes.replace(parentName, attrList); + } else { + List> attrList = new ArrayList<>(); + attrList.add(attributeList); + tsvFileAttributes.put(parentName, attrList); + } + } + + } + } + /** + * Generic method for creating a blackboard artifact with attributes + * + * @param type is a blackboard.artifact_type enum to determine + * which type the artifact should be + * @param content is the Content object that needs to have the + * artifact added for it + * @param bbattributes is the collection of blackboard attributes that + * need to be added to the artifact after the + * artifact has been created + * + * @return The newly-created artifact, or null on error + */ + private BlackboardArtifact createArtifactWithAttributes(int type, AbstractFile abstractFile, Collection bbattributes) { + try { + BlackboardArtifact bbart = abstractFile.newArtifact(type); + bbart.addAttributes(bbattributes); + return bbart; + } catch (TskException 
ex) { + logger.log(Level.WARNING, Bundle.ILeappFileProcessor_error_creating_new_artifacts(), ex); //NON-NLS + } + return null; + } + + /** + * Method to post a list of BlackboardArtifacts to the blackboard. + * + * @param artifacts A list of artifacts. IF list is empty or null, the + * function will return. + */ + void postArtifacts(Collection artifacts) { + if (artifacts == null || artifacts.isEmpty()) { + return; + } + + try { + Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, MODULE_NAME); + } catch (Blackboard.BlackboardException ex) { + logger.log(Level.SEVERE, Bundle.ILeappFileProcessor_postartifacts_error(), ex); //NON-NLS + } + } + + /** + * Extract the iLeapp config xml file to the user directory to process + * + * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException + */ + private void configExtractor() throws IOException { + PlatformUtil.extractResourceToUserConfigDir(ILeappFileProcessor.class, XMLFILE, true); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ileap-artifact-attribute-reference.xml b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ileap-artifact-attribute-reference.xml new file mode 100644 index 0000000000..2fa8174e2e --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/ileappanalyzer/ileap-artifact-attribute-reference.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java index 3843eb29f4..83bfb2fdae 100644 --- a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java @@ -1195,11 +1195,10 @@ public class 
PortableCaseReportModule implements ReportModule { } /** - * Copy the sorceFolder to destBaseFolder\appName. + * Copy the sourceFolder to destBaseFolder/appName. * * @param sourceFolder Autopsy installation directory. * @param destBaseFolder Report base direction. - * @param appName Name of the application being copied. * * @throws IOException */ diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java index 5bc3f46fac..4a7eb4d975 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobLogger.java @@ -70,7 +70,7 @@ final class AutoIngestJobLogger { */ INFO, /** - * Qualifies a log message about an unexpected event or condtion during + * Qualifies a log message about an unexpected event or condition during * automated ingest processing. */ WARNING, @@ -208,6 +208,18 @@ final class AutoIngestJobLogger { void logDataSourceProcessorSelected(String dsp) throws AutoIngestJobLoggerException, InterruptedException { log(MessageCategory.INFO, "Using data source processor: " + dsp); } + + /** + * Log that a data source is being skipped. + * + * @param dataSourceName The name of the data source + * + * @throws AutoIngestJobLogger.AutoIngestJobLoggerException + * @throws InterruptedException + */ + void logSkippingDataSource(String dataSourceName) throws AutoIngestJobLoggerException, InterruptedException { + log(MessageCategory.INFO, "File type can not currently be processed"); + } /** * Logs the failure of the selected data source processor. 
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 7c817e7739..a6ece45667 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -1040,7 +1040,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } } - + /** * A task that submits an input directory scan task to the input directory * scan task executor. @@ -2440,6 +2440,15 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen currentJob.setProcessingStage(AutoIngestJob.Stage.COMPLETED, Date.from(Instant.now())); return; } + + if (SupportedDataSources.shouldSkipFile(dataSource.getPath().toString())) { + Manifest manifest = currentJob.getManifest(); + AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), currentJob.getCaseDirectoryPath()); + jobLogger.logSkippingDataSource(dataSource.getPath().toString()); + sysLogger.log(Level.INFO, "Skipping data source that can not be processed ({0})", dataSource.getPath().toString()); + currentJob.setProcessingStage(AutoIngestJob.Stage.COMPLETED, Date.from(Instant.now())); + return; + } if (currentJob.isCanceled() || jobProcessingTaskFuture.isCancelled()) { return; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/SupportedDataSources.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/SupportedDataSources.java new file mode 100644 index 0000000000..9df1587215 --- /dev/null +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/SupportedDataSources.java @@ -0,0 +1,53 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.experimental.autoingest; + +import java.util.Arrays; +import java.util.List; +import org.apache.commons.io.FilenameUtils; + +/** + * Utility class for checking whether a data source/file should be processed + * in an automated setting. The goal is to not spend time analyzing large + * files that Autopsy can not handle yet. + */ +public final class SupportedDataSources { + + private static final List UNSUPPORTED_EXTENSIONS = Arrays.asList("xry", "dar"); + + /** + * Check whether a file should be added to a case, either as a data source or part of a + * logical file set. + * + * @param fileName The name of the file. + * + * @return true if the file is currently unsupported and should be skipped, false otherwise. 
+ */ + public static boolean shouldSkipFile(String fileName) { + String ext = FilenameUtils.getExtension(fileName); + if (ext == null) { + return false; + } + return UNSUPPORTED_EXTENSIONS.contains(ext.toLowerCase()); + } + + private SupportedDataSources() { + // Static class + } +} diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index 44db83039a..95066b41d1 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -36,7 +36,7 @@ KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index 05a02bf11b..a456b2fb32 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.keywordsearch; +import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.awt.event.ActionEvent; import java.beans.PropertyChangeListener; import java.io.BufferedReader; @@ -42,9 +43,11 @@ import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Random; +import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.logging.Level; +import static java.util.stream.Collectors.toList; import javax.swing.AbstractAction; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; @@ -1468,6 +1471,15 @@ public class Server { // the server to access a core needs to be built from a URL with the // core in it, and is only good for core-specific operations private final HttpSolrServer solrCore; + + private final int maxBufferSize; + private final List buffer; + private final Object bufferLock; + + private final ScheduledThreadPoolExecutor periodicTasksExecutor; + private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10; + private static final int NUM_BATCH_UPDATE_RETRIES = 10; + private static final long SLEEP_BETWEEN_RETRIES_MS = 10000; // 10 seconds private final int QUERY_TIMEOUT_MILLISECONDS = 86400000; // 24 Hours = 86,400,000 Milliseconds @@ -1475,6 +1487,7 @@ public class Server { this.name = name; this.caseType = caseType; this.textIndex = 
index; + bufferLock = new Object(); this.solrCore = new HttpSolrServer(currentSolrServer.getBaseURL() + "/" + name); //NON-NLS @@ -1490,7 +1503,45 @@ solrCore.setAllowCompression(true); solrCore.setParser(new XMLResponseParser()); // binary parser is used by default + // document batching + maxBufferSize = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getDocumentsQueueSize(); + logger.log(Level.INFO, "Using Solr document queue size = {0}", maxBufferSize); //NON-NLS + buffer = new ArrayList<>(maxBufferSize); + periodicTasksExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().setNameFormat("periodic-batched-document-task-%d").build()); //NON-NLS + periodicTasksExecutor.scheduleWithFixedDelay(new SendBatchedDocumentsTask(), PERIODIC_BATCH_SEND_INTERVAL_MINUTES, PERIODIC_BATCH_SEND_INTERVAL_MINUTES, TimeUnit.MINUTES); } + + /** + * A task that periodically sends batched documents to Solr. Batched documents + * get sent automatically as soon as the batching buffer gets full. However, + * if the buffer is not full, we want to periodically send the batched documents + * so that users are able to see them in their keyword searches. + */ + private final class SendBatchedDocumentsTask implements Runnable { + + @Override + public void run() { + List clone; + synchronized (bufferLock) { + + if (buffer.isEmpty()) { + return; + } + + // Buffer is not empty. 
Make a clone and release the lock, so that we don't + // hold other ingest threads + clone = buffer.stream().collect(toList()); + buffer.clear(); + } + + try { + // send the cloned list to Solr + sendBufferedDocs(clone); + } catch (KeywordSearchModuleException ex) { + logger.log(Level.SEVERE, "Periodic batched document update failed", ex); //NON-NLS + } + } + } /** * Get the name of the core @@ -1531,6 +1582,20 @@ public class Server { } private void commit() throws SolrServerException { + List clone; + synchronized (bufferLock) { + // Make a clone and release the lock, so that we don't + // hold other ingest threads + clone = buffer.stream().collect(toList()); + buffer.clear(); + } + + try { + sendBufferedDocs(clone); + } catch (KeywordSearchModuleException ex) { + throw new SolrServerException(NbBundle.getMessage(this.getClass(), "Server.commit.exception.msg"), ex); + } + try { //commit and block solrCore.commit(true, true); @@ -1548,14 +1613,77 @@ public class Server { solrCore.deleteByQuery(deleteQuery); } + /** + * Add a Solr document for indexing. Documents get batched instead of + * being immediately sent to Solr (unless batch size = 1). + * + * @param doc Solr document to be indexed. + * + * @throws KeywordSearchModuleException + */ void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException { + + List clone; + synchronized (bufferLock) { + buffer.add(doc); + // buffer documents if the buffer is not full + if (buffer.size() < maxBufferSize) { + return; + } + + // Buffer is full. Make a clone and release the lock, so that we don't + // hold other ingest threads + clone = buffer.stream().collect(toList()); + buffer.clear(); + } + + // send the cloned list to Solr + sendBufferedDocs(clone); + } + + /** + * Send a list of buffered documents to Solr. 
+ * + * @param docBuffer List of buffered Solr documents + * + * @throws KeywordSearchModuleException + */ + private void sendBufferedDocs(List docBuffer) throws KeywordSearchModuleException { + + if (docBuffer.isEmpty()) { + return; + } + try { - solrCore.add(doc); - } catch (Exception ex) { - // Solr throws a lot of unexpected exception types - logger.log(Level.SEVERE, "Could not add document to index via update handler: " + doc.getField("id"), ex); //NON-NLS - throw new KeywordSearchModuleException( - NbBundle.getMessage(this.getClass(), "Server.addDoc.exception.msg", doc.getField("id")), ex); //NON-NLS + boolean success = true; + for (int reTryAttempt = 0; reTryAttempt < NUM_BATCH_UPDATE_RETRIES; reTryAttempt++) { + try { + success = true; + solrCore.add(docBuffer); + } catch (Exception ex) { + success = false; + if (reTryAttempt < NUM_BATCH_UPDATE_RETRIES - 1) { + logger.log(Level.WARNING, "Unable to send document batch to Solr. Re-trying...", ex); //NON-NLS + try { + Thread.sleep(SLEEP_BETWEEN_RETRIES_MS); + } catch (InterruptedException ignore) { + throw new KeywordSearchModuleException( + NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS + } + } + } + if (success) { + if (reTryAttempt > 0) { + logger.log(Level.INFO, "Batch update suceeded after {0} re-try", reTryAttempt); //NON-NLS + } + return; + } + } + // if we are here, it means all re-try attempts failed + logger.log(Level.SEVERE, "Unable to send document batch to Solr. 
All re-try attempts failed!"); //NON-NLS + throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS + } finally { + docBuffer.clear(); } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/UserPreferences.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/UserPreferences.java new file mode 100755 index 0000000000..11f7654f8c --- /dev/null +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/UserPreferences.java @@ -0,0 +1,76 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2020 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.keywordsearch; + +import java.util.prefs.BackingStoreException; +import java.util.prefs.PreferenceChangeListener; +import java.util.prefs.Preferences; +import org.openide.util.NbPreferences; + +/** + * Provides convenient access to a Preferences node for user preferences with + * default values. + */ +final class UserPreferences { + + private static final Preferences preferences = NbPreferences.forModule(UserPreferences.class); + private static final String INDEXING_DOC_QUEUE_SIZE = "IndexingDocumentQueueSize"; //NON-NLS + private static final int DEFAULT_INDEXING_DOC_QUEUE_SIZE = 30; //NON-NLS + + // Prevent instantiation. + private UserPreferences() { + } + + /** + * Reload all preferences from disk. 
This is only needed if the preferences + * file is being directly modified on disk while Autopsy is running. + * + * @throws BackingStoreException + */ + public static void reloadFromStorage() throws BackingStoreException { + preferences.sync(); + } + + /** + * Saves the current preferences to storage. This is only needed if the + * preferences files are going to be copied to another location while + * Autopsy is running. + * + * @throws BackingStoreException + */ + public static void saveToStorage() throws BackingStoreException { + preferences.flush(); + } + + public static void addChangeListener(PreferenceChangeListener listener) { + preferences.addPreferenceChangeListener(listener); + } + + public static void removeChangeListener(PreferenceChangeListener listener) { + preferences.removePreferenceChangeListener(listener); + } + + public static void setDocumentsQueueSize(int size) { + preferences.putInt(INDEXING_DOC_QUEUE_SIZE, size); + } + + public static int getDocumentsQueueSize() { + return preferences.getInt(INDEXING_DOC_QUEUE_SIZE, DEFAULT_INDEXING_DOC_QUEUE_SIZE); + } +} \ No newline at end of file diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index 35138509d8..9f493a25f3 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Wed, 08 Jul 2020 15:15:46 -0400 +#Wed, 09 Sep 2020 10:39:20 -0400 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index cf36e85b33..0f09178514 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ 
b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Wed, 08 Jul 2020 15:15:46 -0400 +#Wed, 09 Sep 2020 10:39:20 -0400 CTL_MainWindow_Title=Autopsy 4.16.0 CTL_MainWindow_Title_No_Project=Autopsy 4.16.0 diff --git a/thirdparty/iLeapp/LICENSE b/thirdparty/iLeapp/LICENSE new file mode 100644 index 0000000000..ae8fc549fc --- /dev/null +++ b/thirdparty/iLeapp/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Brigs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/thirdparty/iLeapp/ileapp.exe b/thirdparty/iLeapp/ileapp.exe new file mode 100644 index 0000000000..8176b4f679 Binary files /dev/null and b/thirdparty/iLeapp/ileapp.exe differ