From f4b6d23e234bde22870ccfd49d9a5778da64a423 Mon Sep 17 00:00:00 2001 From: Raman Date: Thu, 7 Feb 2019 06:33:21 -0500 Subject: [PATCH 01/80] 1184: Chrome cache parsing - interim commit --- .../autopsy/recentactivity/Bundle.properties | 1 + .../autopsy/recentactivity/Chrome.java | 3 + .../recentactivity/ChromeCacheExtractor.java | 989 ++++++++++++++++++ 3 files changed, 993 insertions(+) create mode 100644 RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties index 1750a4287c..30e2189452 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties @@ -24,6 +24,7 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f Chrome.getLogin.errMsg.errAnalyzingFiles={0}\: Error while trying to analyze file\:{1} Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files. Chrome.getAutofill.errMsg.errAnalyzingFiles={0}\: Error while trying to analyze file\:{1} +ChromeCacheExtractor.moduleName=ChromeCacheExtractor Extract.dbConn.errMsg.failedToQueryDb={0}\: Failed to query database. ExtractIE.moduleName.text=Internet Explorer ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}\: Error getting Internet Explorer Bookmarks. 
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 481b1ec3f4..57ee0fd62b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -97,6 +97,9 @@ class Chrome extends Extract { this.getLogins(); this.getAutofill(); this.getDownload(); + + ChromeCacheExtractor chromeCacheExtractor = new ChromeCacheExtractor(dataSource, context); + chromeCacheExtractor.getCache(); } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java new file mode 100644 index 0000000000..ebb34fca30 --- /dev/null +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -0,0 +1,989 @@ +/* + * + * Autopsy Forensic Browser + * + * Copyright 2019 Basis Technology Corp. + * + * Project Contact/Architect: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.recentactivity; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.FileChannel; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.logging.Level; +import org.openide.util.Exceptions; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.casemodule.services.FileManager; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.autopsy.ingest.IngestJobContext; +import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; +import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.ReadContentInputStream; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TimeUtilities; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskException; + +/** + * Extracts and parses Chrome Cache files. 
+ */ +final class ChromeCacheExtractor { + + private final long UINT32_MASK = 0xFFFFFFFFl; + + private final int INDEXFILE_HDR_SIZE = 92*4; + private final int DATAFILE_HDR_SIZE = 8192; + + private final String moduleName; + private final Logger logger = Logger.getLogger(this.getClass().getName()); + private String outputFolderName; + + private final Content dataSource; + private final IngestJobContext context; + private Case currentCase; + private SleuthkitCase tskCase; + private FileManager fileManager; + private FileTypeDetector fileTypeDetector; + + private Map filesTable = new HashMap<>(); + + final class CacheFileCopy { + + private AbstractFile abstractFile; + + // RAMAN TBD: - save the plain File here. so it can be deleted later. + // Caller can create an RandomAccessFile as well as ByteBuffer as needed + private RandomAccessFile fileCopy; + private ByteBuffer byteBuffer; + + CacheFileCopy (AbstractFile abstractFile, RandomAccessFile fileCopy, ByteBuffer buffer ) { + this.abstractFile = abstractFile; + this.fileCopy = fileCopy; + this.byteBuffer = buffer; + } + + public RandomAccessFile getFileCopy() { + return fileCopy; + } + public ByteBuffer getByteBuffer() { + return byteBuffer; + } + AbstractFile getAbstractFile() { + return abstractFile; + } + } + + ChromeCacheExtractor(Content dataSource, IngestJobContext context ) { + moduleName = NbBundle.getMessage(ChromeCacheExtractor.class, "ChromeCacheExtractor.moduleName"); + this.dataSource = dataSource; + this.context = context; + } + + + /** + * Initializes Chrome cache extractor module + * + * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException + */ + void init() throws IngestModuleException { + + try { + currentCase = Case.getCurrentCaseThrows(); + tskCase = currentCase.getSleuthkitCase(); + fileManager = currentCase.getServices().getFileManager(); + fileTypeDetector = new FileTypeDetector(); + + // Create an output folder to save derived files + outputFolderName = 
RAImageIngestModule.getRAOutputPath(currentCase, moduleName); + File dir = new File(outputFolderName); + if (dir.exists() == false) { + dir.mkdirs(); + } + + } catch (NoCurrentCaseException ex) { + String msg = "Failed to get current case."; + throw new IngestModuleException(msg, ex); + } catch (FileTypeDetector.FileTypeDetectorInitException ex) { + String msg = "Failed to get FileTypeDetector."; + throw new IngestModuleException(msg, ex); + } + } + + void cleanup () { + + // RAMAN TBD: delete all files in the table + + // Cant delete the RandomAccessFile. May need to switch the CacheFileCopy to ony store the "File" + // And create a RandomAcessfile and ByteBuffer on it when and as needed. + + + + } + + /** + * Returns the location of output folder for this module + * + * @return + */ + private String getOutputFolderName() { + return outputFolderName; + } + + /** + * Extracts the data from Chrome cache + */ + void getCache() { + + try { + init(); + } catch (IngestModuleException ex) { + + // TBD: show the error to Autospy error console?? 
+ String msg = "Failed to initialize ChromeCacheExtractor."; + logger.log(Level.SEVERE, msg, ex); + return; + } + + + Optional indexFile; + try { + // find the index file + indexFile = findAndCopyCacheFile("index"); + if (!indexFile.isPresent()) { + return; + } + + for (int i = 0; i < 4; i ++) { + Optional dataFile = findAndCopyCacheFile(String.format("data_%1d",i)); + if (!dataFile.isPresent()) { + return; + } + } + + } catch (TskCoreException | IngestModuleException ex) { + String msg = "Failed to find cache files"; + logger.log(Level.SEVERE, msg, ex); + return; + } + + logger.log(Level.INFO, "{0}- Now reading Cache index file", new Object[]{moduleName}); //NON-NLS + + + ByteBuffer indexFileROBuffer = indexFile.get().getByteBuffer(); + IndexFileHeader indexHdr = new IndexFileHeader(indexFileROBuffer); + + // seek past the header + indexFileROBuffer.position(INDEXFILE_HDR_SIZE); + + for (int i = 0; i < indexHdr.getTableLen(); i++) { + + CacheAddress addr = new CacheAddress(indexFileROBuffer.getInt() & UINT32_MASK); + if (addr.isInitialized()) { + + String fileName = addr.getFilename(); + try { + Optional cacheFileCopy = this.getCacheFileCopy(fileName); + if (!cacheFileCopy.isPresent()) { + logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr)); //NON-NLS + } + + // Get the cache entry at this address + CacheEntry cacheEntry = new CacheEntry(addr, cacheFileCopy.get() ); + + // Get the data segments - each entry can have up to 4 data segments + List dataEntries = cacheEntry.getData(); + for (int j = 0; j < dataEntries.size(); j++) { + CacheData data = dataEntries.get(j); + + // Todo: extract the data if we are going to do something with it in the future + + //data.extract(); + + if (data.isInExternalFile() ) { + + String externalFilename = data.getAddress().getFilename(); + Optional externalFile = this.findCacheFile(externalFilename); + if (externalFile.isPresent()) { + + try { + Collection bbattributes = new ArrayList<>(); + 
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + moduleName, + ((cacheEntry.getKey() != null) ? cacheEntry.getKey() : ""))); //NON-NLS + + BlackboardArtifact bbart = externalFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); + if (bbart != null) { + bbart.addAttributes(bbattributes); + } + + } catch (TskException ex) { + logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS + } + } + } + } + + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr)); //NON-NLS + } catch (IngestModuleException ex) { + Exceptions.printStackTrace(ex); + } + } + } + + } + + /** + * Finds abstract file for cache file with a specified name + * + * @param cacheFileName + * @return Opt + * @throws TskCoreException + */ + Optional findCacheFile(String cacheFileName) throws TskCoreException { + + List cacheFiles = fileManager.findFiles(dataSource, cacheFileName, "default/cache"); //NON-NLS + if (!cacheFiles.isEmpty()) { + if (cacheFiles.size() > 1 ) { + logger.log(Level.WARNING, String.format("Found multiple matches for filename = %s", cacheFileName)); + } + return Optional.of(cacheFiles.get(0)); + } + + return Optional.empty(); + } + + /** + * + * @param cacheFileName + * @return + * @throws TskCoreException + */ + Optional getCacheFileCopy(String cacheFileName) throws TskCoreException, IngestModuleException { + + // Check if the file is already in the table + if (filesTable.containsKey(cacheFileName)) + return Optional.of(filesTable.get(cacheFileName)); + + return findAndCopyCacheFile(cacheFileName); + } + + /** + * Finds the specified cache file and makes a temporary copy + * + * @param cacheFileName + * @return Cache file copy + * @throws TskCoreException + */ + Optional findAndCopyCacheFile(String cacheFileName) throws TskCoreException, IngestModuleException { + + Optional cacheFileOptional = findCacheFile(cacheFileName); + if 
(!cacheFileOptional.isPresent()) { + return Optional.empty(); + } + + AbstractFile cacheFile = cacheFileOptional.get(); + String tempIndexFilePath = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + cacheFile.getName(); //NON-NLS + try { + ContentUtils.writeToFile(cacheFile, new File(tempIndexFilePath), context::dataSourceIngestIsCancelled); + + RandomAccessFile randomAccessFile; + FileChannel roChannel; + ByteBuffer cacheFileROBuf; + + randomAccessFile = new RandomAccessFile(tempIndexFilePath, "r"); + roChannel = randomAccessFile.getChannel(); + cacheFileROBuf = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, + (int) roChannel.size()); + + cacheFileROBuf.order(ByteOrder.nativeOrder()); + CacheFileCopy cacheFileCopy = new CacheFileCopy(cacheFile, randomAccessFile, cacheFileROBuf ); + + if (!cacheFileName.startsWith("f_")) { + filesTable.put(cacheFileName, cacheFileCopy); + } + + return Optional.of(cacheFileCopy); + } + catch (ReadContentInputStream.ReadContentInputStreamException ex) { + String msg = String.format("Error reading Chrome cache file '%s' (id=%d).", + cacheFile.getName(), cacheFile.getId()); + throw new IngestModuleException(msg, ex); + } catch (IOException ex) { + String msg = String.format("Error writing temp Chrome cache file '%s' (id=%d).", + cacheFile.getName(), cacheFile.getId()); + throw new IngestModuleException(msg, ex); + } + } + + /** + * Encapsulates the header found in the index file + */ + final class IndexFileHeader { + + private final long magic; + private final int version; + private final int numEntries; + private final int numBytes; + private final int lastFile; + private final int tableLen; + + IndexFileHeader(ByteBuffer indexFileROBuf) { + + magic = indexFileROBuf.getInt() & UINT32_MASK; + + indexFileROBuf.position(indexFileROBuf.position()+2); + + version = indexFileROBuf.getShort(); + numEntries = indexFileROBuf.getInt(); + numBytes = indexFileROBuf.getInt(); + lastFile = indexFileROBuf.getInt(); + + 
indexFileROBuf.position(indexFileROBuf.position()+4); // this_id + indexFileROBuf.position(indexFileROBuf.position()+4); // stats cache address + + tableLen = indexFileROBuf.getInt(); + } + + public long getMagic() { + return magic; + } + + public int getVersion() { + return version; + } + + public int getNumEntries() { + return numEntries; + } + + public int getNumBytes() { + return numBytes; + } + + public int getLastFile() { + return lastFile; + } + + public int getTableLen() { + return tableLen; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + + sb.append(String.format("Index Header:")); + sb.append(String.format("\tMagic = %x" , getMagic()) ); + sb.append(String.format("\tVersion = %x" , getVersion()) ); + sb.append(String.format("\tNumEntries = %x" , getNumEntries()) ); + sb.append(String.format("\tNumBytes = %x" , getNumBytes()) ); + sb.append(String.format("\tLastFile = %x" , getLastFile()) ); + sb.append(String.format("\tTableLen = %x" , getTableLen()) ); + + return sb.toString(); + } + } + + enum CacheFileTypeEnum { + EXTERNAL, + RANKINGS, + BLOCK_256, + BLOCK_1K, + BLOCK_4K, + BLOCK_FILES, + BLOCK_ENTRIES, + BLOCK_EVICTED + } + + // CacheAddress is a unsigned 32 bit number + // + // Header: + // 1000 0000 0000 0000 0000 0000 0000 0000 : initialized bit + // 0111 0000 0000 0000 0000 0000 0000 0000 : file type + // + // If separate file: + // 0000 1111 1111 1111 1111 1111 1111 1111 : file# 0 - 268,435,456 (2^28) + // + // If block file: + // 0000 1100 0000 0000 0000 0000 0000 0000 : reserved bits + // 0000 0011 0000 0000 0000 0000 0000 0000 : number of contiguous blocks 1-4 + // 0000 0000 1111 1111 0000 0000 0000 0000 : file selector 0 - 255 + // 0000 0000 0000 0000 1111 1111 1111 1111 : block# 0 - 65,535 (2^16) + + final class CacheAddress { + // sundry constants to parse the bit fields in address + private final long ADDR_INITIALIZED_MASK = 0x80000000l; + private final long FILE_TYPE_MASK = 0x70000000; + private 
final long FILE_TYPE_OFFSET = 28; + private final long NUM_BLOCKS_MASK = 0x03000000; + private final long NUM_BLOCKS_OFFSET = 24; + private final long FILE_SELECTOR_MASK = 0x00ff0000; + private final long FILE_SELECTOR_OFFSET = 16; + private final long START_BLOCK_MASK = 0x0000FFFF; + private final long EXTERNAL_FILE_NAME_MASK = 0x0FFFFFFF; + + private final long uint32CacheAddr; + private final CacheFileTypeEnum fileType; + private final int numBlocks; + private final int startBlock; + private final String fileName; + private final int fileNumber; + + + CacheAddress(long uint32) { + + uint32CacheAddr = uint32; + int fileTypeEnc = (int)(uint32CacheAddr & FILE_TYPE_MASK) >> FILE_TYPE_OFFSET; + fileType = CacheFileTypeEnum.values()[fileTypeEnc]; + + if (isInitialized()) { + if (isInExternalFile()) { + fileNumber = (int)(uint32CacheAddr & EXTERNAL_FILE_NAME_MASK); + fileName = String.format("f_%06x", getFileNumber() ); + numBlocks = 0; + startBlock = 0; + } else { + fileNumber = (int)((uint32CacheAddr & FILE_SELECTOR_MASK) >> FILE_SELECTOR_OFFSET); + fileName = String.format("data_%d", getFileNumber() ); + numBlocks = (int)(uint32CacheAddr & NUM_BLOCKS_MASK >> NUM_BLOCKS_OFFSET); + startBlock = (int)(uint32CacheAddr & START_BLOCK_MASK); + } + } + else { + fileName = null; + fileNumber = 0; + numBlocks = 0; + startBlock = 0; + } + } + + boolean isInitialized() { + return ((uint32CacheAddr & ADDR_INITIALIZED_MASK) != 0); + } + + CacheFileTypeEnum getFileType() { + return fileType; + } + + String getFilename() { + return fileName; + } + + boolean isInExternalFile() { + return (fileType == CacheFileTypeEnum.EXTERNAL); + } + + int getFileNumber() { + return fileNumber; + } + + int getStartBlock() { + return startBlock; + } + + int getNumBlocks() { + return numBlocks; + } + + int getBlockSize() { + switch (fileType) { + case RANKINGS: + return 36; + case BLOCK_256: + return 256; + case BLOCK_1K: + return 1024; + case BLOCK_4K: + return 4096; + case BLOCK_FILES: + return 8; 
+ case BLOCK_ENTRIES: + return 104; + case BLOCK_EVICTED: + return 48; + default: + return 0; + } + } + + public long getUint32CacheAddr() { + return uint32CacheAddr; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(String.format("CacheAddr %08x : %s : filename %s", + uint32CacheAddr, + isInitialized() ? "Initialized" : "UnInitialized", + getFilename())); + + if ((fileType == CacheFileTypeEnum.BLOCK_256) || + (fileType == CacheFileTypeEnum.BLOCK_1K) || + (fileType == CacheFileTypeEnum.BLOCK_4K) ) { + sb.append(String.format(" (%d blocks starting at %08X)", + this.getNumBlocks(), + this.getStartBlock() + )); + } + + return sb.toString(); + } + + } + + + enum CacheDataTypeEnum { + HTTP_HEADER, + UNKNOWN, + }; + + /** + * Encapsulates a cached data segment. + * + * A data segment may have HTTP headers, scripts, image files (png, gif), or JSON files + * + * A data segment may be stored in one of the data_x files or an external file - f_xxxxxx + * + * A data segment may be compressed - GZIP and BRotli are the two commonly used methods. 
+ */ + final class CacheData { + + private int length; + private final CacheAddress address; + private CacheDataTypeEnum type; + + private boolean isHTTPHeaderHint; + + private CacheFileCopy cacheFileCopy = null; + private byte[] data = null; + + // mime type of the data segment helps determine if it is compressed + private String mimeType = ""; + + CacheData(CacheAddress cacheAdress, int len) { + this(cacheAdress, len, false); + } + + CacheData(CacheAddress cacheAdress, int len, boolean isHTTPHeader ) { + this.type = CacheDataTypeEnum.UNKNOWN; + this.length = len; + this.address = cacheAdress; + this.isHTTPHeaderHint = isHTTPHeader; + } + + boolean isInExternalFile() { + return address.isInExternalFile(); + } + + boolean isCompressedFile() { + if (isInExternalFile()) { + return mimeType.equalsIgnoreCase("application/octet-stream"); + } + else { + return false; + } + } + + String getMimeType() { + return mimeType; + } + + /** + * Extracts the data segment from the file + * + * @throws TskCoreException + */ + void extract() throws TskCoreException, IngestModuleException { + + // do nothing if already extracted, + if (data != null) { + return; + } + + cacheFileCopy = getCacheFileCopy(address.getFilename()).get(); + if (!address.isInExternalFile() ) { + + this.data = new byte [length]; + ByteBuffer buf = cacheFileCopy.getByteBuffer(); + int dataOffset = DATAFILE_HDR_SIZE + address.getStartBlock() * address.getBlockSize(); + buf.position(dataOffset); + buf.get(data, 0, length); + + // if this might be a HTPP header, lets try to parse it as such + if ((isHTTPHeaderHint)) { + // Check if we can find the http headers + String strData = new String(data); + if (strData.contains("HTTP")) { + + // TBD parse header + // Past some bytes there's the HTTP headers + // General Parsing algo: + // - Find start of HTTP header by searching for string "HTTP" + // - Skip to the first 0x00 ti get to the end of the HTTP response line, this makrs start of headers section + // - Find the 
end of the end by searching for 0x00 0x00 bytes + // - Extract the headers section + // - Parse the headers section - each null terminated string is a header + // - Each header is of the format "name: value" e.g. + + type = CacheDataTypeEnum.HTTP_HEADER; + } + } + + } else { + // Handle external f_* files + + // External files may or may not be compressed + // They may be compresswed with GZIP, which our other ingest modules recognize and decpress + // Alternatively thay may be compressed with Brotli, in that case we may want to decopmress them + + // TBD: In future if we want to do anything with contents of file. +// this.data = new byte [length]; +// +// ByteBuffer buf = cacheFileCopy.getByteBuffer(); +// buf.position(0); +// buf.get(data, 0, length); +// +// // get mime type, to determine if the file is compressed or not +// AbstractFile abstractFile = cacheFileCopy.getAbstractFile(); +// mimeType = fileTypeDetector.getMIMEType(abstractFile); + + } + } + + String getDataString() throws TskCoreException, IngestModuleException { + if (data == null) { + extract(); + } + return new String(data); + } + + byte[] getDataBytes() throws TskCoreException, IngestModuleException { + if (data == null) { + extract(); + } + return data; + } + + int getDataLength() { + return this.length; + } + + CacheDataTypeEnum getType() { + return type; + } + + CacheAddress getAddress() { + return address; + } + + // RAMAN TBD: save needs to return something that can be used to add a derived file +// void save() throws TskCoreException, IngestModuleException { +// String fileName; +// +// if (address.isInExternalFile()) { +// fileName = address.getFilename(); +// } else { +// fileName = String.format("%s__%08x", address.getFilename(), address.getUint32CacheAddr()); +// } +// save(getOutputFolderName() + File.separator + fileName); +// } + + // TBD: save needs to return something that can be used to add a derived file +// void save(String filePathName) throws TskCoreException, 
IngestModuleException { +// +// // Save the data to specified file +// if (data == null) { +// extract(); +// } +// +// if (!this.isInExternalFile() || +// !this.isCompressedFile()) { +// // write the +// try (FileOutputStream stream = new FileOutputStream(filePathName)) { +// stream.write(data); +// } catch (IOException ex) { +// throw new TskCoreException(String.format("Failed to write output file %s", filePathName), ex); +// } +// } +// else { +// if (mimeType.toLowerCase().contains("gzip")) { +// //if (mimeType.equalsIgnoreCase("application/gzip")) { +// try { +// ByteArrayInputStream byteInputStream = new ByteArrayInputStream(data); +// GZIPInputStream in = new GZIPInputStream(byteInputStream); +// FileOutputStream out = new FileOutputStream(filePathName); +// byte[] buffer = new byte[2048]; +// int len; +// while((len = in.read(buffer)) != -1){ +// out.write(buffer, 0, len); +// } +// out.close(); +// +// } catch (IOException ex) { +// throw new TskCoreException(String.format("Failed to write output file %s", filePathName), ex); +// } +// } +// else { +// // TBD: how to uncompress Brotli ?? +// System.out.println("TBD Dont know how to uncompress Brotli yet" ); +// } +// } +// } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(String.format("\t\tData type = : %s, Data Len = %d", + this.type.toString(), this.length )); + sb.append("\n"); + sb.append(String.format("\t\tData = : %s ", new String(data) )); + + return sb.toString(); + } + + } + + + enum EntryStateEnum { + ENTRY_NORMAL, + ENTRY_EVICTED, + ENTRY_DOOMED + }; + + +// Main structure for an entry on the backing storage. +// +// Each entry has a key, identifying the URL the cache entry pertains to. +// If the key is longer than +// what can be stored on this structure, it will be extended on consecutive +// blocks (adding 256 bytes each time), up to 4 blocks (1024 - 32 - 1 chars). 
+// After that point, the whole key will be stored as a data block or external +// file. +// +// Each entry can have upto 4 data segments +// +// struct EntryStore { +// uint32 hash; // Full hash of the key. +// CacheAddr next; // Next entry with the same hash or bucket. +// CacheAddr rankings_node; // Rankings node for this entry. +// int32 reuse_count; // How often is this entry used. +// int32 refetch_count; // How often is this fetched from the net. +// int32 state; // Current state. +// uint64 creation_time; +// int32 key_len; +// CacheAddr long_key; // Optional address of a long key. +// int32 data_size[4]; // We can store up to 4 data streams for each +// CacheAddr data_addr[4]; // entry. +// uint32 flags; // Any combination of EntryFlags. +// int32 pad[4]; +// uint32 self_hash; // The hash of EntryStore up to this point. +// char key[256 - 24 * 4]; // null terminated +// }; + + /** + * Encapsulates a Cache Entry + */ + final class CacheEntry { + + // each entry is 256 bytes. The last section of the entry, after all the other fields is a null terminated key + private final int MAX_KEY_LEN = 256-24*4; + + private final CacheAddress selfAddress; + private final CacheFileCopy cacheFileCopy; + + private long hash; + private CacheAddress nextAddress; + private CacheAddress rankingsNodeAddress; + + private int reuseCount; + private int refetchCount; + private EntryStateEnum state; + + private long creationTime; + private int keyLen; + + private CacheAddress longKeyAddresses; // address of the key, if the key is external to the entry + + private int dataSizes[] = new int[4]; + private CacheAddress dataAddresses[] = new CacheAddress[4]; + + private long flags; + private int pad[] = new int[4]; + + private long selfHash; // hash of the entry itself so far. 
+ private String key; // Key may be found within the entry or may be external + + CacheEntry(CacheAddress cacheAdress, CacheFileCopy cacheFileCopy ) { + this.selfAddress = cacheAdress; + this.cacheFileCopy = cacheFileCopy; + + ByteBuffer fileROBuf = cacheFileCopy.getByteBuffer(); + + int entryOffset = DATAFILE_HDR_SIZE + cacheAdress.getStartBlock() * cacheAdress.getBlockSize(); + + // reposition the buffer to the the correct offset + fileROBuf.position(entryOffset); + + hash = fileROBuf.getInt() & UINT32_MASK; + + long uint32 = fileROBuf.getInt() & UINT32_MASK; + nextAddress = (uint32 != 0) ? new CacheAddress(uint32) : null; + + uint32 = fileROBuf.getInt() & UINT32_MASK; + rankingsNodeAddress = (uint32 != 0) ? new CacheAddress(uint32) : null; + + reuseCount = fileROBuf.getInt(); + refetchCount = fileROBuf.getInt(); + + state = EntryStateEnum.values()[fileROBuf.getInt()]; + creationTime = (fileROBuf.getLong() / 1000000) - Long.valueOf("11644473600"); + + keyLen = fileROBuf.getInt(); + + uint32 = fileROBuf.getInt() & UINT32_MASK; + longKeyAddresses = (uint32 != 0) ? 
new CacheAddress(uint32) : null; + + for (int i = 0; i < 4; i++) { + dataSizes[i] = fileROBuf.getInt(); + } + for (int i = 0; i < 4; i++) { + dataAddresses[i] = new CacheAddress(fileROBuf.getInt() & UINT32_MASK); + } + + flags = fileROBuf.getInt() & UINT32_MASK; + for (int i = 0; i < 4; i++) { + pad[i] = fileROBuf.getInt(); + } + + selfHash = fileROBuf.getInt() & UINT32_MASK; + + // get the key + if (longKeyAddresses != null) { + // Key is stored outside of the entry + try { + CacheData data = new CacheData(longKeyAddresses, this.keyLen, true); + key = data.getDataString(); + } catch (TskCoreException | IngestModuleException ex) { + logger.log(Level.SEVERE, String.format("Failed to get external key from address %s", longKeyAddresses)); //NON-NLS + } + } + else { // key stored within entry + StringBuilder sb = new StringBuilder(MAX_KEY_LEN); + int i = 0; + while (fileROBuf.remaining() > 0 && i < MAX_KEY_LEN) { + char c = (char)fileROBuf.get(); + if (c == '\0') { + break; + } + sb.append(c); + } + + key = sb.toString(); + } + } + + public CacheAddress getAddress() { + return selfAddress; + } + + public long getHash() { + return hash; + } + + public CacheAddress getNextAddress() { + return nextAddress; + } + + public int getReuseCount() { + return reuseCount; + } + + public int getRefetchCount() { + return refetchCount; + } + + public EntryStateEnum getState() { + return state; + } + + public long getCreationTime() { + return creationTime; + } + + public long getFlags() { + return flags; + } + + public String getKey() { + return key; + } + + public ArrayList getData() { + ArrayList list = new ArrayList<>(); + for (int i = 0; i < 4; i++) { + if (dataSizes[i] > 0) { + CacheData cacheData = new CacheData(dataAddresses[i], dataSizes[i], true ); + list.add(cacheData); + } + } + return list; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(String.format("Entry = Hash: %08x, State: %s, ReuseCount: %d, RefetchCount: %d", + 
this.hash, this.state.toString(), this.reuseCount, this.refetchCount )); + + sb.append(String.format("\n\tKey: %s, Keylen: %d", + this.key, this.keyLen, this.reuseCount, this.refetchCount )); + + sb.append(String.format("\n\tCreationTime: %s", + TimeUtilities.epochToTime(this.creationTime) )); + + sb.append(String.format("\n\tNext Address: %s", + (nextAddress != null) ? nextAddress.toString() : "None")); + + for (int i = 0; i < 4; i++) { + if (dataSizes[i] > 0) { + sb.append(String.format("\n\tData %d: %8d bytes at cache address = %s", + i, dataSizes[i], dataAddresses[i] )); + } + } + + return sb.toString(); + } + } +} From 9796ccfffb970aebf38c8a636b34b9ccb0f4229a Mon Sep 17 00:00:00 2001 From: Raman Date: Thu, 7 Feb 2019 12:11:55 -0500 Subject: [PATCH 02/80] 1184: Parse Chrome cache - sundry cleanup to previous commit. --- .../recentactivity/ChromeCacheExtractor.java | 83 ++++++++++--------- 1 file changed, 45 insertions(+), 38 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index ebb34fca30..4cebe28158 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -31,6 +31,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.logging.Level; import org.openide.util.Exceptions; @@ -71,18 +72,19 @@ final class ChromeCacheExtractor { private final Content dataSource; private final IngestJobContext context; private Case currentCase; - private SleuthkitCase tskCase; private FileManager fileManager; private FileTypeDetector fileTypeDetector; + private Map filesTable = new HashMap<>(); + /** + * Encapsulates abstract file for a cache file as well as a temp file copy + * that can be 
accessed as a random access file. + */ final class CacheFileCopy { private AbstractFile abstractFile; - - // RAMAN TBD: - save the plain File here. so it can be deleted later. - // Caller can create an RandomAccessFile as well as ByteBuffer as needed private RandomAccessFile fileCopy; private ByteBuffer byteBuffer; @@ -119,17 +121,15 @@ final class ChromeCacheExtractor { try { currentCase = Case.getCurrentCaseThrows(); - tskCase = currentCase.getSleuthkitCase(); fileManager = currentCase.getServices().getFileManager(); fileTypeDetector = new FileTypeDetector(); - // Create an output folder to save derived files + // Create an output folder to save any derived files outputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName); File dir = new File(outputFolderName); if (dir.exists() == false) { dir.mkdirs(); } - } catch (NoCurrentCaseException ex) { String msg = "Failed to get current case."; throw new IngestModuleException(msg, ex); @@ -139,15 +139,26 @@ final class ChromeCacheExtractor { } } + /** + * Cleans up after the module is done + * + */ void cleanup () { - // RAMAN TBD: delete all files in the table - - // Cant delete the RandomAccessFile. May need to switch the CacheFileCopy to ony store the "File" - // And create a RandomAcessfile and ByteBuffer on it when and as needed. 
- - - + for (Entry entry : this.filesTable.entrySet()) { + String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + File.separator + entry.getKey(); + try { + entry.getValue().getFileCopy().getChannel().close(); + entry.getValue().getFileCopy().close(); + + File tmpFile = new File(tempFilePathname); + if (!tmpFile.delete()) { + tmpFile.deleteOnExit(); + } + } catch (IOException ex) { + logger.log(Level.SEVERE, String.format("Failed to delete cache file copy %s", tempFilePathname), ex); + } + } } /** @@ -198,18 +209,17 @@ final class ChromeCacheExtractor { logger.log(Level.INFO, "{0}- Now reading Cache index file", new Object[]{moduleName}); //NON-NLS - ByteBuffer indexFileROBuffer = indexFile.get().getByteBuffer(); IndexFileHeader indexHdr = new IndexFileHeader(indexFileROBuffer); // seek past the header indexFileROBuffer.position(INDEXFILE_HDR_SIZE); + // Process each address in the table for (int i = 0; i < indexHdr.getTableLen(); i++) { - CacheAddress addr = new CacheAddress(indexFileROBuffer.getInt() & UINT32_MASK); + if (addr.isInitialized()) { - String fileName = addr.getFilename(); try { Optional cacheFileCopy = this.getCacheFileCopy(fileName); @@ -230,11 +240,10 @@ final class ChromeCacheExtractor { //data.extract(); if (data.isInExternalFile() ) { - String externalFilename = data.getAddress().getFilename(); Optional externalFile = this.findCacheFile(externalFilename); + if (externalFile.isPresent()) { - try { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, @@ -245,7 +254,6 @@ final class ChromeCacheExtractor { if (bbart != null) { bbart.addAttributes(bbattributes); } - } catch (TskException ex) { logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS } @@ -253,14 +261,13 @@ final class ChromeCacheExtractor { } } - } catch (TskCoreException ex) { + } catch (TskCoreException | IngestModuleException ex) { 
logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr)); //NON-NLS - } catch (IngestModuleException ex) { - Exceptions.printStackTrace(ex); - } + } } } - + + cleanup(); } /** @@ -284,22 +291,25 @@ final class ChromeCacheExtractor { } /** + * Returns CacheFileCopy for the specified file from the file table. + * Find the file and creates a copy if it isnt already in the table. * * @param cacheFileName - * @return + * @return CacheFileCopy * @throws TskCoreException */ Optional getCacheFileCopy(String cacheFileName) throws TskCoreException, IngestModuleException { // Check if the file is already in the table - if (filesTable.containsKey(cacheFileName)) + if (filesTable.containsKey(cacheFileName)) { return Optional.of(filesTable.get(cacheFileName)); + } return findAndCopyCacheFile(cacheFileName); } /** - * Finds the specified cache file and makes a temporary copy + * Finds the specified cache file and makes a temporary copy. * * @param cacheFileName * @return Cache file copy @@ -313,18 +323,15 @@ final class ChromeCacheExtractor { } AbstractFile cacheFile = cacheFileOptional.get(); - String tempIndexFilePath = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + cacheFile.getName(); //NON-NLS + String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + File.separator + cacheFile.getName(); //NON-NLS try { - ContentUtils.writeToFile(cacheFile, new File(tempIndexFilePath), context::dataSourceIngestIsCancelled); + File newFile = new File(tempFilePathname); + ContentUtils.writeToFile(cacheFile, newFile, context::dataSourceIngestIsCancelled); - RandomAccessFile randomAccessFile; - FileChannel roChannel; - ByteBuffer cacheFileROBuf; - - randomAccessFile = new RandomAccessFile(tempIndexFilePath, "r"); - roChannel = randomAccessFile.getChannel(); - cacheFileROBuf = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, - (int) roChannel.size()); + RandomAccessFile randomAccessFile = new 
RandomAccessFile(tempFilePathname, "r"); + FileChannel roChannel = randomAccessFile.getChannel(); + ByteBuffer cacheFileROBuf = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, + (int) roChannel.size()); cacheFileROBuf.order(ByteOrder.nativeOrder()); CacheFileCopy cacheFileCopy = new CacheFileCopy(cacheFile, randomAccessFile, cacheFileROBuf ); From b36b5d7d63bb6ff4f53cb1cb73cda0460eada20b Mon Sep 17 00:00:00 2001 From: Raman Date: Fri, 8 Feb 2019 11:19:19 -0500 Subject: [PATCH 03/80] 1184: Chrome cache - Extract and save data segments from data_x files. --- .../recentactivity/ChromeCacheExtractor.java | 157 ++++++++++++------ .../recentactivity/RAImageIngestModule.java | 11 ++ 2 files changed, 121 insertions(+), 47 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 4cebe28158..c59bd77c69 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.recentactivity; import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; @@ -43,16 +44,21 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import 
org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DerivedFile; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TimeUtilities; import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskException; /** @@ -65,12 +71,17 @@ final class ChromeCacheExtractor { private final int INDEXFILE_HDR_SIZE = 92*4; private final int DATAFILE_HDR_SIZE = 8192; - private final String moduleName; private final Logger logger = Logger.getLogger(this.getClass().getName()); - private String outputFolderName; + + private static final String VERSION_NUMBER = "1.0.0"; + private final String moduleName; + + private String absOutputFolderName; + private String relOutputFolderName; private final Content dataSource; private final IngestJobContext context; + private IngestServices services = IngestServices.getInstance(); private Case currentCase; private FileManager fileManager; private FileTypeDetector fileTypeDetector; @@ -106,7 +117,7 @@ final class ChromeCacheExtractor { } ChromeCacheExtractor(Content dataSource, IngestJobContext context ) { - moduleName = NbBundle.getMessage(ChromeCacheExtractor.class, "ChromeCacheExtractor.moduleName"); + moduleName = NbBundle.getMessage(ChromeCacheExtractor.class, "ChromeCacheExtractor.moduleName"); this.dataSource = dataSource; this.context = context; } @@ -125,8 +136,9 @@ final class ChromeCacheExtractor { fileTypeDetector = new FileTypeDetector(); // Create an output folder to save any derived files - outputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName); - File dir = new File(outputFolderName); + absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName); + relOutputFolderName = RAImageIngestModule.getRelModuleOutputPath() + File.separator + moduleName; + File dir = new 
File(absOutputFolderName); if (dir.exists() == false) { dir.mkdirs(); } @@ -166,8 +178,17 @@ final class ChromeCacheExtractor { * * @return */ - private String getOutputFolderName() { - return outputFolderName; + private String getAbsOutputFolderName() { + return absOutputFolderName; + } + + /** + * Returns the relative location of output folder for this module + * + * @return + */ + private String getRelOutputFolderName() { + return relOutputFolderName; } /** @@ -185,7 +206,6 @@ final class ChromeCacheExtractor { return; } - Optional indexFile; try { // find the index file @@ -209,6 +229,8 @@ final class ChromeCacheExtractor { logger.log(Level.INFO, "{0}- Now reading Cache index file", new Object[]{moduleName}); //NON-NLS + List derivedFiles = new ArrayList<>(); + ByteBuffer indexFileROBuffer = indexFile.get().getByteBuffer(); IndexFileHeader indexHdr = new IndexFileHeader(indexFileROBuffer); @@ -238,26 +260,53 @@ final class ChromeCacheExtractor { // Todo: extract the data if we are going to do something with it in the future //data.extract(); + String dataFilename = data.getAddress().getFilename(); + Optional dataFile = this.findCacheFile(dataFilename); - if (data.isInExternalFile() ) { - String externalFilename = data.getAddress().getFilename(); - Optional externalFile = this.findCacheFile(externalFilename); - - if (externalFile.isPresent()) { - try { - Collection bbattributes = new ArrayList<>(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, moduleName, ((cacheEntry.getKey() != null) ? 
cacheEntry.getKey() : ""))); //NON-NLS - - BlackboardArtifact bbart = externalFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); + + if (dataFile.isPresent()) { + if (data.isInExternalFile() ) { + try { + BlackboardArtifact bbart = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); if (bbart != null) { bbart.addAttributes(bbattributes); } } catch (TskException ex) { logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS } - } + } else { + + // extract data segment and save it as derived file + data.extract(); + String filename = data.save(); + String relPathname = getRelOutputFolderName() + File.separator + filename; + + // TBD: check if data segment is compressed? With Brotli? + DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, + data.getDataLength(), + cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), // TBD + true, + dataFile.get(), + "", + moduleName, + VERSION_NUMBER, + "", + TskData.EncodingType.NONE); + + derivedFiles.add(derivedFile); + try { + BlackboardArtifact bbart = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); + if (bbart != null) { + bbart.addAttributes(bbattributes); + } + } catch (TskException ex) { + logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS + } + } } } @@ -267,6 +316,15 @@ final class ChromeCacheExtractor { } } + if (derivedFiles.isEmpty() == false) { + for (AbstractFile derived : derivedFiles) { + services.fireModuleContentEvent(new ModuleContentEvent(derived)); + } + } + + context.addFilesToJob(derivedFiles); + services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT)); + cleanup(); } @@ -715,35 +773,40 @@ final class ChromeCacheExtractor { return address; } - // RAMAN TBD: save needs to return something that can be used to add a derived file -// void save() throws TskCoreException, 
IngestModuleException { -// String fileName; -// -// if (address.isInExternalFile()) { -// fileName = address.getFilename(); -// } else { -// fileName = String.format("%s__%08x", address.getFilename(), address.getUint32CacheAddr()); -// } -// save(getOutputFolderName() + File.separator + fileName); -// } - // TBD: save needs to return something that can be used to add a derived file -// void save(String filePathName) throws TskCoreException, IngestModuleException { -// -// // Save the data to specified file -// if (data == null) { -// extract(); -// } -// -// if (!this.isInExternalFile() || -// !this.isCompressedFile()) { -// // write the -// try (FileOutputStream stream = new FileOutputStream(filePathName)) { -// stream.write(data); -// } catch (IOException ex) { -// throw new TskCoreException(String.format("Failed to write output file %s", filePathName), ex); -// } -// } + String save() throws TskCoreException, IngestModuleException { + String fileName; + + if (address.isInExternalFile()) { + fileName = address.getFilename(); + } else { + fileName = String.format("%s__%08x", address.getFilename(), address.getUint32CacheAddr()); + } + + String filePathName = getAbsOutputFolderName() + File.separator + fileName; + save(filePathName); + + return fileName; + } + + + void save(String filePathName) throws TskCoreException, IngestModuleException { + + // Save the data to specified file + if (data == null) { + extract(); + } + + if (!this.isInExternalFile() || + !this.isCompressedFile()) { + + // write the + try (FileOutputStream stream = new FileOutputStream(filePathName)) { + stream.write(data); + } catch (IOException ex) { + throw new TskCoreException(String.format("Failed to write output file %s", filePathName), ex); + } + } // else { // if (mimeType.toLowerCase().contains("gzip")) { // //if (mimeType.equalsIgnoreCase("application/gzip")) { @@ -767,7 +830,7 @@ final class ChromeCacheExtractor { // System.out.println("TBD Dont know how to uncompress Brotli yet" ); // 
} // } -// } + } @Override public String toString() { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 7d17030809..105331dde6 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -225,4 +225,15 @@ public final class RAImageIngestModule implements DataSourceIngestModule { } return tmpDir; } + + /** + * Get relative path for module output folder. + * + * @throws NoCurrentCaseException if there is no open case. + * @return the relative path of the module output folder + */ + static String getRelModuleOutputPath() throws NoCurrentCaseException { + return Case.getCurrentCaseThrows().getModuleOutputDirectoryRelativePath() + File.separator + + "RecentActivity"; + } } From 4c4a6fa3fc9d496891af58bcc7e19242b3a8ead4 Mon Sep 17 00:00:00 2001 From: Raman Date: Fri, 8 Feb 2019 12:51:36 -0500 Subject: [PATCH 04/80] Address Codacy comments. 
--- .../recentactivity/ChromeCacheExtractor.java | 113 +++++++++--------- 1 file changed, 59 insertions(+), 54 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index c59bd77c69..65dba83207 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -35,7 +35,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.logging.Level; -import org.openide.util.Exceptions; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -55,7 +54,6 @@ import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DerivedFile; import org.sleuthkit.datamodel.ReadContentInputStream; -import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TimeUtilities; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -66,10 +64,10 @@ import org.sleuthkit.datamodel.TskException; */ final class ChromeCacheExtractor { - private final long UINT32_MASK = 0xFFFFFFFFl; + private final static long UINT32_MASK = 0xFFFFFFFFl; - private final int INDEXFILE_HDR_SIZE = 92*4; - private final int DATAFILE_HDR_SIZE = 8192; + private final static int INDEXFILE_HDR_SIZE = 92*4; + private final static int DATAFILE_HDR_SIZE = 8192; private final Logger logger = Logger.getLogger(this.getClass().getName()); @@ -87,7 +85,7 @@ final class ChromeCacheExtractor { private FileTypeDetector fileTypeDetector; - private Map filesTable = new HashMap<>(); + private final Map filesTable = new HashMap<>(); /** * Encapsulates abstract file for a cache file as well as a temp file copy @@ -95,9 +93,9 @@ final class 
ChromeCacheExtractor { */ final class CacheFileCopy { - private AbstractFile abstractFile; - private RandomAccessFile fileCopy; - private ByteBuffer byteBuffer; + private final AbstractFile abstractFile; + private final RandomAccessFile fileCopy; + private final ByteBuffer byteBuffer; CacheFileCopy (AbstractFile abstractFile, RandomAccessFile fileCopy, ByteBuffer buffer ) { this.abstractFile = abstractFile; @@ -468,18 +466,21 @@ final class ChromeCacheExtractor { public String toString() { StringBuilder sb = new StringBuilder(); - sb.append(String.format("Index Header:")); - sb.append(String.format("\tMagic = %x" , getMagic()) ); - sb.append(String.format("\tVersion = %x" , getVersion()) ); - sb.append(String.format("\tNumEntries = %x" , getNumEntries()) ); - sb.append(String.format("\tNumBytes = %x" , getNumBytes()) ); - sb.append(String.format("\tLastFile = %x" , getLastFile()) ); - sb.append(String.format("\tTableLen = %x" , getTableLen()) ); + sb.append(String.format("Index Header:")) + .append(String.format("\tMagic = %x" , getMagic()) ) + .append(String.format("\tVersion = %x" , getVersion()) ) + .append(String.format("\tNumEntries = %x" , getNumEntries()) ) + .append(String.format("\tNumBytes = %x" , getNumBytes()) ) + .append(String.format("\tLastFile = %x" , getLastFile()) ) + .append(String.format("\tTableLen = %x" , getTableLen()) ); return sb.toString(); } } + /** + * Cache file type enum - as encoded the address + */ enum CacheFileTypeEnum { EXTERNAL, RANKINGS, @@ -508,15 +509,15 @@ final class ChromeCacheExtractor { final class CacheAddress { // sundry constants to parse the bit fields in address - private final long ADDR_INITIALIZED_MASK = 0x80000000l; - private final long FILE_TYPE_MASK = 0x70000000; - private final long FILE_TYPE_OFFSET = 28; - private final long NUM_BLOCKS_MASK = 0x03000000; - private final long NUM_BLOCKS_OFFSET = 24; - private final long FILE_SELECTOR_MASK = 0x00ff0000; - private final long FILE_SELECTOR_OFFSET = 16; - private 
final long START_BLOCK_MASK = 0x0000FFFF; - private final long EXTERNAL_FILE_NAME_MASK = 0x0FFFFFFF; + private static final long ADDR_INITIALIZED_MASK = 0x80000000l; + private static final long FILE_TYPE_MASK = 0x70000000; + private static final long FILE_TYPE_OFFSET = 28; + private static final long NUM_BLOCKS_MASK = 0x03000000; + private static final long NUM_BLOCKS_OFFSET = 24; + private static final long FILE_SELECTOR_MASK = 0x00ff0000; + private static final long FILE_SELECTOR_OFFSET = 16; + private static final long START_BLOCK_MASK = 0x0000FFFF; + private static final long EXTERNAL_FILE_NAME_MASK = 0x0FFFFFFF; private final long uint32CacheAddr; private final CacheFileTypeEnum fileType; @@ -628,7 +629,9 @@ final class ChromeCacheExtractor { } - + /** + * Enum for data type in a data segment. + */ enum CacheDataTypeEnum { HTTP_HEADER, UNKNOWN, @@ -758,7 +761,7 @@ final class ChromeCacheExtractor { if (data == null) { extract(); } - return data; + return data.clone(); } int getDataLength() { @@ -836,9 +839,9 @@ final class ChromeCacheExtractor { public String toString() { StringBuilder sb = new StringBuilder(); sb.append(String.format("\t\tData type = : %s, Data Len = %d", - this.type.toString(), this.length )); - sb.append("\n"); - sb.append(String.format("\t\tData = : %s ", new String(data) )); + this.type.toString(), this.length )) + .append("\n") + .append(String.format("\t\tData = : %s ", new String(data) )); return sb.toString(); } @@ -846,6 +849,9 @@ final class ChromeCacheExtractor { } + /** + * State of cache entry + */ enum EntryStateEnum { ENTRY_NORMAL, ENTRY_EVICTED, @@ -888,31 +894,31 @@ final class ChromeCacheExtractor { final class CacheEntry { // each entry is 256 bytes. 
The last section of the entry, after all the other fields is a null terminated key - private final int MAX_KEY_LEN = 256-24*4; + private static final int MAX_KEY_LEN = 256-24*4; private final CacheAddress selfAddress; private final CacheFileCopy cacheFileCopy; - private long hash; - private CacheAddress nextAddress; - private CacheAddress rankingsNodeAddress; + private final long hash; + private final CacheAddress nextAddress; + private final CacheAddress rankingsNodeAddress; - private int reuseCount; - private int refetchCount; - private EntryStateEnum state; + private final int reuseCount; + private final int refetchCount; + private final EntryStateEnum state; - private long creationTime; - private int keyLen; + private final long creationTime; + private final int keyLen; - private CacheAddress longKeyAddresses; // address of the key, if the key is external to the entry + private final CacheAddress longKeyAddresses; // address of the key, if the key is external to the entry - private int dataSizes[] = new int[4]; - private CacheAddress dataAddresses[] = new CacheAddress[4]; + private final int dataSizes[]; + private final CacheAddress dataAddresses[]; - private long flags; - private int pad[] = new int[4]; + private final long flags; + private final int pad[] = new int[4]; - private long selfHash; // hash of the entry itself so far. + private final long selfHash; // hash of the entry itself so far. private String key; // Key may be found within the entry or may be external CacheEntry(CacheAddress cacheAdress, CacheFileCopy cacheFileCopy ) { @@ -945,9 +951,11 @@ final class ChromeCacheExtractor { uint32 = fileROBuf.getInt() & UINT32_MASK; longKeyAddresses = (uint32 != 0) ? 
new CacheAddress(uint32) : null; + dataSizes= new int[4]; for (int i = 0; i < 4; i++) { dataSizes[i] = fileROBuf.getInt(); } + dataAddresses = new CacheAddress[4]; for (int i = 0; i < 4; i++) { dataAddresses[i] = new CacheAddress(fileROBuf.getInt() & UINT32_MASK); } @@ -1035,15 +1043,12 @@ final class ChromeCacheExtractor { public String toString() { StringBuilder sb = new StringBuilder(); sb.append(String.format("Entry = Hash: %08x, State: %s, ReuseCount: %d, RefetchCount: %d", - this.hash, this.state.toString(), this.reuseCount, this.refetchCount )); - - sb.append(String.format("\n\tKey: %s, Keylen: %d", - this.key, this.keyLen, this.reuseCount, this.refetchCount )); - - sb.append(String.format("\n\tCreationTime: %s", - TimeUtilities.epochToTime(this.creationTime) )); - - sb.append(String.format("\n\tNext Address: %s", + this.hash, this.state.toString(), this.reuseCount, this.refetchCount )) + .append(String.format("\n\tKey: %s, Keylen: %d", + this.key, this.keyLen, this.reuseCount, this.refetchCount )) + .append(String.format("\n\tCreationTime: %s", + TimeUtilities.epochToTime(this.creationTime) )) + .append(String.format("\n\tNext Address: %s", (nextAddress != null) ? 
nextAddress.toString() : "None")); for (int i = 0; i < 4; i++) { From c8bf55244d7a57084f09ab80d0b9beb785889d1b Mon Sep 17 00:00:00 2001 From: Raman Date: Mon, 11 Feb 2019 11:11:44 -0500 Subject: [PATCH 05/80] 1184 - Chrome cache parsing - Extract HTTP headers from data segment --- .../recentactivity/ChromeCacheExtractor.java | 151 +++++++++++------- 1 file changed, 94 insertions(+), 57 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 65dba83207..aad2753e2c 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -27,6 +27,7 @@ import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; +import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -46,7 +47,6 @@ import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; @@ -69,7 +69,7 @@ final class ChromeCacheExtractor { private final static int INDEXFILE_HDR_SIZE = 92*4; private final static int DATAFILE_HDR_SIZE = 8192; - private final Logger logger = Logger.getLogger(this.getClass().getName()); + private final static Logger logger = Logger.getLogger(ChromeCacheExtractor.class.getName()); private static final String VERSION_NUMBER = "1.0.0"; private final String moduleName; @@ -79,10 +79,9 @@ final class ChromeCacheExtractor 
{ private final Content dataSource; private final IngestJobContext context; - private IngestServices services = IngestServices.getInstance(); + private final IngestServices services = IngestServices.getInstance(); private Case currentCase; private FileManager fileManager; - private FileTypeDetector fileTypeDetector; private final Map filesTable = new HashMap<>(); @@ -131,7 +130,6 @@ final class ChromeCacheExtractor { try { currentCase = Case.getCurrentCaseThrows(); fileManager = currentCase.getServices().getFileManager(); - fileTypeDetector = new FileTypeDetector(); // Create an output folder to save any derived files absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName); @@ -143,10 +141,7 @@ final class ChromeCacheExtractor { } catch (NoCurrentCaseException ex) { String msg = "Failed to get current case."; throw new IngestModuleException(msg, ex); - } catch (FileTypeDetector.FileTypeDetectorInitException ex) { - String msg = "Failed to get FileTypeDetector."; - throw new IngestModuleException(msg, ex); - } + } } /** @@ -280,6 +275,12 @@ final class ChromeCacheExtractor { // extract data segment and save it as derived file data.extract(); + + if (data.hasHTTPHeaders()) { + String encoding = data.getHTTPHeader("content-encoding"); + + } + String filename = data.save(); String relPathname = getRelOutputFolderName() + File.separator + filename; @@ -492,21 +493,27 @@ final class ChromeCacheExtractor { BLOCK_EVICTED } - // CacheAddress is a unsigned 32 bit number - // - // Header: - // 1000 0000 0000 0000 0000 0000 0000 0000 : initialized bit - // 0111 0000 0000 0000 0000 0000 0000 0000 : file type - // - // If separate file: - // 0000 1111 1111 1111 1111 1111 1111 1111 : file# 0 - 268,435,456 (2^28) - // - // If block file: - // 0000 1100 0000 0000 0000 0000 0000 0000 : reserved bits - // 0000 0011 0000 0000 0000 0000 0000 0000 : number of contiguous blocks 1-4 - // 0000 0000 1111 1111 0000 0000 0000 0000 : file selector 0 - 255 - // 0000 
0000 0000 0000 1111 1111 1111 1111 : block# 0 - 65,535 (2^16) + + /** + * Encapsulates Cache address. + * + * CacheAddress is a unsigned 32 bit number + * + * Header: + * 1000 0000 0000 0000 0000 0000 0000 0000 : initialized bit + * 0111 0000 0000 0000 0000 0000 0000 0000 : file type + * + * If separate file: + * 0000 1111 1111 1111 1111 1111 1111 1111 : file# 0 - 268,435,456 (2^28) + * + * If block file: + * 0000 1100 0000 0000 0000 0000 0000 0000 : reserved bits + * 0000 0011 0000 0000 0000 0000 0000 0000 : number of contiguous blocks 1-4 + * 0000 0000 1111 1111 0000 0000 0000 0000 : file selector 0 - 255 + * 0000 0000 0000 0000 1111 1111 1111 1111 : block# 0 - 65,535 (2^16) + * + */ final class CacheAddress { // sundry constants to parse the bit fields in address private static final long ADDR_INITIALIZED_MASK = 0x80000000l; @@ -657,9 +664,9 @@ final class ChromeCacheExtractor { private CacheFileCopy cacheFileCopy = null; private byte[] data = null; - // mime type of the data segment helps determine if it is compressed - private String mimeType = ""; - + private String httpResponse; + private final Map httpHeaders = new HashMap<>(); + CacheData(CacheAddress cacheAdress, int len) { this(cacheAdress, len, false); } @@ -675,17 +682,16 @@ final class ChromeCacheExtractor { return address.isInExternalFile(); } - boolean isCompressedFile() { - if (isInExternalFile()) { - return mimeType.equalsIgnoreCase("application/octet-stream"); - } - else { - return false; - } + boolean hasHTTPHeaders() { + return this.type == CacheDataTypeEnum.HTTP_HEADER; } - String getMimeType() { - return mimeType; + String getHTTPHeader(String key) { + return this.httpHeaders.get(key); + } + + String getHTTPRespone() { + return httpResponse; } /** @@ -711,30 +717,66 @@ final class ChromeCacheExtractor { // if this might be a HTPP header, lets try to parse it as such if ((isHTTPHeaderHint)) { - // Check if we can find the http headers String strData = new String(data); if 
(strData.contains("HTTP")) { - // TBD parse header - // Past some bytes there's the HTTP headers + // Http headers if present, are usually in frst data segment in an entry // General Parsing algo: // - Find start of HTTP header by searching for string "HTTP" - // - Skip to the first 0x00 ti get to the end of the HTTP response line, this makrs start of headers section - // - Find the end of the end by searching for 0x00 0x00 bytes + // - Skip to the first 0x00 to get to the end of the HTTP response line, this makrs start of headers section + // - Find the end of the header by searching for 0x00 0x00 bytes // - Extract the headers section // - Parse the headers section - each null terminated string is a header // - Each header is of the format "name: value" e.g. - type = CacheDataTypeEnum.HTTP_HEADER; + type = CacheDataTypeEnum.HTTP_HEADER; + + int startOff = strData.indexOf("HTTP"); + Charset charset = Charset.forName("UTF-8"); + boolean done = false; + int i = startOff; + int hdrNum = 1; + + while (!done) { + // each header is null terminated + int start = i; + while (i < data.length && data[i] != 0) { + i++; + } + + // hhtp headers are terminated by 0x00 0x00 + if (data[i+1] == 0) { + done = true; + } + + int len = (i - start); + String headerLine = new String(data, start, len, charset); + + // first line is the http response + if (hdrNum == 1) { + httpResponse = headerLine; + } else { + int nPos = headerLine.indexOf(":"); + String key = headerLine.substring(0, nPos); + String val= headerLine.substring(nPos+1); + + httpHeaders.put(key.toLowerCase(), val); + } + + i++; + hdrNum++; + } } } - } else { + } + //else { // Handle external f_* files // External files may or may not be compressed // They may be compresswed with GZIP, which our other ingest modules recognize and decpress // Alternatively thay may be compressed with Brotli, in that case we may want to decopmress them + // content-encoding header in the data segment with HTTP header can tell us if data is 
compressed. // TBD: In future if we want to do anything with contents of file. // this.data = new byte [length]; @@ -743,11 +785,8 @@ final class ChromeCacheExtractor { // buf.position(0); // buf.get(data, 0, length); // -// // get mime type, to determine if the file is compressed or not -// AbstractFile abstractFile = cacheFileCopy.getAbstractFile(); -// mimeType = fileTypeDetector.getMIMEType(abstractFile); - } + //} } String getDataString() throws TskCoreException, IngestModuleException { @@ -800,8 +839,7 @@ final class ChromeCacheExtractor { extract(); } - if (!this.isInExternalFile() || - !this.isCompressedFile()) { + if (!this.isInExternalFile()) { // write the try (FileOutputStream stream = new FileOutputStream(filePathName)) { @@ -837,13 +875,12 @@ final class ChromeCacheExtractor { @Override public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(String.format("\t\tData type = : %s, Data Len = %d", + StringBuilder strBuilder = new StringBuilder(); + strBuilder.append(String.format("\t\tData type = : %s, Data Len = %d\n", this.type.toString(), this.length )) - .append("\n") - .append(String.format("\t\tData = : %s ", new String(data) )); + .append(String.format("\n\t\tData = : %s ", new String(data) )); - return sb.toString(); + return strBuilder.toString(); } } @@ -978,17 +1015,17 @@ final class ChromeCacheExtractor { } } else { // key stored within entry - StringBuilder sb = new StringBuilder(MAX_KEY_LEN); + StringBuilder strBuilder = new StringBuilder(MAX_KEY_LEN); int i = 0; while (fileROBuf.remaining() > 0 && i < MAX_KEY_LEN) { char c = (char)fileROBuf.get(); if (c == '\0') { break; } - sb.append(c); + strBuilder.append(c); } - key = sb.toString(); + key = strBuilder.toString(); } } @@ -1028,7 +1065,7 @@ final class ChromeCacheExtractor { return key; } - public ArrayList getData() { + public List getData() { ArrayList list = new ArrayList<>(); for (int i = 0; i < 4; i++) { if (dataSizes[i] > 0) { From 
06754545fca28ca9530b1609385411d966eb3061 Mon Sep 17 00:00:00 2001 From: Raman Date: Tue, 12 Feb 2019 11:41:34 -0500 Subject: [PATCH 06/80] - Assign mime type "application/x-brotli" to Brotli compressed files from Chrome cache.. - Sundry cleanup --- .../recentactivity/ChromeCacheExtractor.java | 265 +++++++++++------- 1 file changed, 160 insertions(+), 105 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index aad2753e2c..f7aad7b518 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -61,9 +61,20 @@ import org.sleuthkit.datamodel.TskException; /** * Extracts and parses Chrome Cache files. + * + * Cache may hold images, scripts, CSS, JSON files, + * and the URL they were downloaded from. + * + * Cache entries may or may not be compressed, + * and the entries may reside in container files or external files. + * + * We extract cache entries, create derived files if needed, + * and record the URL. */ final class ChromeCacheExtractor { + private final static String BROTLI_MIMETYPE ="application/x-brotli"; + private final static long UINT32_MASK = 0xFFFFFFFFl; private final static int INDEXFILE_HDR_SIZE = 92*4; @@ -121,9 +132,9 @@ final class ChromeCacheExtractor { /** - * Initializes Chrome cache extractor module + * Initializes Chrome cache extractor module. * - * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException + * @throws IngestModuleException */ void init() throws IngestModuleException { @@ -145,7 +156,9 @@ final class ChromeCacheExtractor { } /** - * Cleans up after the module is done + * Cleans up after the module is done. + * + * Removes any temp copies of cache files created during extraction. 
* */ void cleanup () { @@ -192,8 +205,6 @@ final class ChromeCacheExtractor { try { init(); } catch (IngestModuleException ex) { - - // TBD: show the error to Autospy error console?? String msg = "Failed to initialize ChromeCacheExtractor."; logger.log(Level.SEVERE, msg, ex); return; @@ -242,24 +253,24 @@ final class ChromeCacheExtractor { logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr)); //NON-NLS } - // Get the cache entry at this address + // Get the cache entry and its data segments CacheEntry cacheEntry = new CacheEntry(addr, cacheFileCopy.get() ); - - // Get the data segments - each entry can have up to 4 data segments List dataEntries = cacheEntry.getData(); + for (int j = 0; j < dataEntries.size(); j++) { CacheData data = dataEntries.get(j); - - // Todo: extract the data if we are going to do something with it in the future - - //data.extract(); String dataFilename = data.getAddress().getFilename(); Optional dataFile = this.findCacheFile(dataFilename); + boolean isBrotliCompressed = false; + if (data.getType() != CacheDataTypeEnum.HTTP_HEADER && cacheEntry.isBrotliCompressed() ) { + isBrotliCompressed = true; + } + Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - moduleName, - ((cacheEntry.getKey() != null) ? cacheEntry.getKey() : ""))); //NON-NLS + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + moduleName, + ((cacheEntry.getKey() != null) ? 
cacheEntry.getKey() : ""))); //NON-NLS if (dataFile.isPresent()) { if (data.isInExternalFile() ) { @@ -268,39 +279,38 @@ final class ChromeCacheExtractor { if (bbart != null) { bbart.addAttributes(bbattributes); } + if (isBrotliCompressed) { + dataFile.get().setMIMEType(BROTLI_MIMETYPE); + dataFile.get().save(); + } } catch (TskException ex) { logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS } } else { - - // extract data segment and save it as derived file - data.extract(); - - if (data.hasHTTPHeaders()) { - String encoding = data.getHTTPHeader("content-encoding"); - - } - - String filename = data.save(); - String relPathname = getRelOutputFolderName() + File.separator + filename; - - // TBD: check if data segment is compressed? With Brotli? - DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, - data.getDataLength(), - cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), // TBD - true, - dataFile.get(), - "", - moduleName, - VERSION_NUMBER, - "", - TskData.EncodingType.NONE); - + + // Data segments in "data_x" files are saved in individual files and added as derived files + String filename = data.save(); + String relPathname = getRelOutputFolderName() + File.separator + filename; + DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, + data.getDataLength(), + cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), // TBD + true, + dataFile.get(), + "", + moduleName, + VERSION_NUMBER, + "", + TskData.EncodingType.NONE); + derivedFiles.add(derivedFile); try { BlackboardArtifact bbart = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); if (bbart != null) { bbart.addAttributes(bbattributes); + } + if (isBrotliCompressed) { + derivedFile.setMIMEType(BROTLI_MIMETYPE); + derivedFile.save(); } } catch (TskException ex) { logger.log(Level.SEVERE, "Error 
while trying to add an artifact", ex); //NON-NLS @@ -315,12 +325,10 @@ final class ChromeCacheExtractor { } } - if (derivedFiles.isEmpty() == false) { - for (AbstractFile derived : derivedFiles) { - services.fireModuleContentEvent(new ModuleContentEvent(derived)); - } - } - + derivedFiles.forEach((derived) -> { + services.fireModuleContentEvent(new ModuleContentEvent(derived)); + }); + context.addFilesToJob(derivedFiles); services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT)); @@ -338,8 +346,10 @@ final class ChromeCacheExtractor { List cacheFiles = fileManager.findFiles(dataSource, cacheFileName, "default/cache"); //NON-NLS if (!cacheFiles.isEmpty()) { - if (cacheFiles.size() > 1 ) { - logger.log(Level.WARNING, String.format("Found multiple matches for filename = %s", cacheFileName)); + for (AbstractFile abstractFile: cacheFiles ) { + if (abstractFile.getUniquePath().trim().endsWith("default/cache")) { + return Optional.of(abstractFile); + } } return Optional.of(cacheFiles.get(0)); } @@ -349,7 +359,7 @@ final class ChromeCacheExtractor { /** * Returns CacheFileCopy for the specified file from the file table. - * Find the file and creates a copy if it isnt already in the table. + * Find the file and creates a copy if it isn't already in the table. 
* * @param cacheFileName * @return CacheFileCopy @@ -695,7 +705,7 @@ final class ChromeCacheExtractor { } /** - * Extracts the data segment from the file + * Extracts the data segment from the cache file * * @throws TskCoreException */ @@ -768,25 +778,7 @@ final class ChromeCacheExtractor { } } } - } - //else { - // Handle external f_* files - - // External files may or may not be compressed - // They may be compresswed with GZIP, which our other ingest modules recognize and decpress - // Alternatively thay may be compressed with Brotli, in that case we may want to decopmress them - // content-encoding header in the data segment with HTTP header can tell us if data is compressed. - - // TBD: In future if we want to do anything with contents of file. -// this.data = new byte [length]; -// -// ByteBuffer buf = cacheFileCopy.getByteBuffer(); -// buf.position(0); -// buf.get(data, 0, length); -// - - //} } String getDataString() throws TskCoreException, IngestModuleException { @@ -816,6 +808,14 @@ final class ChromeCacheExtractor { } + /** + * Saves the data segment to a file in the local disk. 
+ * + * @return file name the data is saved in + * + * @throws TskCoreException + * @throws IngestModuleException + */ String save() throws TskCoreException, IngestModuleException { String fileName; @@ -831,6 +831,14 @@ final class ChromeCacheExtractor { return fileName; } + /** + * Saves the data in he specified file name + * + * @param filePathName - file name to save the data in + * + * @throws TskCoreException + * @throws IngestModuleException + */ void save(String filePathName) throws TskCoreException, IngestModuleException { @@ -839,8 +847,8 @@ final class ChromeCacheExtractor { extract(); } + // Data in external files is not saved in local files if (!this.isInExternalFile()) { - // write the try (FileOutputStream stream = new FileOutputStream(filePathName)) { stream.write(data); @@ -848,37 +856,19 @@ final class ChromeCacheExtractor { throw new TskCoreException(String.format("Failed to write output file %s", filePathName), ex); } } -// else { -// if (mimeType.toLowerCase().contains("gzip")) { -// //if (mimeType.equalsIgnoreCase("application/gzip")) { -// try { -// ByteArrayInputStream byteInputStream = new ByteArrayInputStream(data); -// GZIPInputStream in = new GZIPInputStream(byteInputStream); -// FileOutputStream out = new FileOutputStream(filePathName); -// byte[] buffer = new byte[2048]; -// int len; -// while((len = in.read(buffer)) != -1){ -// out.write(buffer, 0, len); -// } -// out.close(); -// -// } catch (IOException ex) { -// throw new TskCoreException(String.format("Failed to write output file %s", filePathName), ex); -// } -// } -// else { -// // TBD: how to uncompress Brotli ?? 
-// System.out.println("TBD Dont know how to uncompress Brotli yet" ); -// } -// } } @Override public String toString() { StringBuilder strBuilder = new StringBuilder(); - strBuilder.append(String.format("\t\tData type = : %s, Data Len = %d\n", - this.type.toString(), this.length )) - .append(String.format("\n\t\tData = : %s ", new String(data) )); + strBuilder.append(String.format("\t\tData type = : %s, Data Len = %d ", + this.type.toString(), this.length )); + + if (hasHTTPHeaders()) { + String str = getHTTPHeader("content-encoding"); + if (str!=null) + strBuilder.append(String.format("\t%s=%s", "content-encoding", str )); + } return strBuilder.toString(); } @@ -951,7 +941,8 @@ final class ChromeCacheExtractor { private final int dataSizes[]; private final CacheAddress dataAddresses[]; - + ArrayList dataList = null; + private final long flags; private final int pad[] = new int[4]; @@ -1065,15 +1056,76 @@ final class ChromeCacheExtractor { return key; } - public List getData() { - ArrayList list = new ArrayList<>(); - for (int i = 0; i < 4; i++) { - if (dataSizes[i] > 0) { - CacheData cacheData = new CacheData(dataAddresses[i], dataSizes[i], true ); - list.add(cacheData); - } + /** + * Returns the data segments in the cache entry. + * + * @return list of data segments in the entry. + * + * @throws TskCoreException + * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException + */ + public List getData() throws TskCoreException, IngestModuleException { + + if (dataList == null) { + dataList = new ArrayList<>(); + for (int i = 0; i < 4; i++) { + if (dataSizes[i] > 0) { + CacheData cacheData = new CacheData(dataAddresses[i], dataSizes[i], true ); + + cacheData.extract(); + dataList.add(cacheData); + } + } } - return list; + return dataList; + } + + /** + * Returns if the Entry has HTTP headers. 
+ * + * If present, the HTTP headers are in the first data segment + * + * @return true if the entry has HTTP headers + */ + boolean hasHTTPHeaders() { + if ((dataList == null) || dataList.isEmpty()) { + return false; + } + return dataList.get(0).hasHTTPHeaders(); + } + + /** + * Returns the specified http header , if present + * + * @param key name of header to return + * @return header value, null if not found + */ + String getHTTPHeader(String key) { + if ((dataList == null) || dataList.isEmpty()) { + return null; + } + // First data segment has the HTTP headers, if any + return dataList.get(0).getHTTPHeader(key); + } + + /** + * Returns if the entry is compressed with Brotli + * + * An entry is considered to be Brotli compressed if it has a + * HTTP header "content-encoding: br" + * + * @return true if the entry id compressed with Brotli, false otherwise. + */ + boolean isBrotliCompressed() { + + if (hasHTTPHeaders() ) { + String encodingHeader = getHTTPHeader("content-encoding"); + if (encodingHeader!= null) { + return encodingHeader.trim().equalsIgnoreCase("br"); + } + } + + return false; } @Override @@ -1090,8 +1142,11 @@ final class ChromeCacheExtractor { for (int i = 0; i < 4; i++) { if (dataSizes[i] > 0) { - sb.append(String.format("\n\tData %d: %8d bytes at cache address = %s", - i, dataSizes[i], dataAddresses[i] )); + sb.append(String.format("\n\tData %d: cache address = %s, Data = %s", + i, dataAddresses[i].toString(), + (dataList != null) + ? dataList.get(i).toString() + : "Data not retrived yet.")); } } From 065378c2741ba18fab7256b1941996133bc4f457 Mon Sep 17 00:00:00 2001 From: Raman Date: Wed, 13 Feb 2019 11:56:33 -0500 Subject: [PATCH 07/80] 1188: Web cache artifact. 
--- .../autopsy/datamodel/ExtractedContent.java | 2 ++ .../recentactivity/ChromeCacheExtractor.java | 23 +++++++++++-------- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java index f32d147cec..7037ad4584 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java @@ -50,6 +50,7 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHS import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskException; @@ -236,6 +237,7 @@ public class ExtractedContent implements AutopsyVisitableItem { doNotShow.add(new BlackboardArtifact.Type(TSK_INTERESTING_ARTIFACT_HIT)); doNotShow.add(new BlackboardArtifact.Type(TSK_ACCOUNT)); doNotShow.add(new BlackboardArtifact.Type(TSK_DATA_SOURCE_USAGE)); + doNotShow.add(new BlackboardArtifact.Type(TSK_SOURCE) ); } private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index f7aad7b518..049e59f346 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -267,18 +267,20 @@ final class ChromeCacheExtractor { isBrotliCompressed = true; } - 
Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + Collection sourceAttributes = new ArrayList<>(); + sourceAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, moduleName, ((cacheEntry.getKey() != null) ? cacheEntry.getKey() : ""))); //NON-NLS if (dataFile.isPresent()) { if (data.isInExternalFile() ) { try { - BlackboardArtifact bbart = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); - if (bbart != null) { - bbart.addAttributes(bbattributes); + BlackboardArtifact sourceArtifact = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE); + if (sourceArtifact != null) { + sourceArtifact.addAttributes(sourceAttributes); } + BlackboardArtifact webCacheArtifact = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); + if (isBrotliCompressed) { dataFile.get().setMIMEType(BROTLI_MIMETYPE); dataFile.get().save(); @@ -304,10 +306,11 @@ final class ChromeCacheExtractor { derivedFiles.add(derivedFile); try { - BlackboardArtifact bbart = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); - if (bbart != null) { - bbart.addAttributes(bbattributes); + BlackboardArtifact sourceArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE); + if (sourceArtifact != null) { + sourceArtifact.addAttributes(sourceAttributes); } + BlackboardArtifact webCacheArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); if (isBrotliCompressed) { derivedFile.setMIMEType(BROTLI_MIMETYPE); derivedFile.save(); @@ -330,7 +333,9 @@ final class ChromeCacheExtractor { }); context.addFilesToJob(derivedFiles); - services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT)); + + services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE)); + services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE)); cleanup(); } 
From c4be4806cf265825636a493e44fb62ac5a65c711 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\kelly" Date: Thu, 14 Feb 2019 13:58:42 -0500 Subject: [PATCH 08/80] Interim committ of History support for Edge browser 1190-edge-history --- .../autopsy/recentactivity/Bundle.properties | 2 +- .../autopsy/recentactivity/Chrome.java | 81 +++++------ .../autopsy/recentactivity/ExtractEdge.java | 135 +++++++++++++++++- .../autopsy/recentactivity/ExtractIE.java | 51 +++---- .../recentactivity/ExtractRegistry.java | 4 +- .../autopsy/recentactivity/Firefox.java | 130 +++++++---------- 6 files changed, 237 insertions(+), 166 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties index d6425a4488..19256a6a50 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties @@ -109,4 +109,4 @@ ExtractEdge.moduleName=Microsoft Edge ExtractEdge.process.errMsg.unableFindESEViewer=Unable to find ESEDatabaseViewer ExtractEdge.process.errMsg.errGettingWebCacheFiles=Error retrieving Edge file ExtractEdge.process.errMsg.noWebCachFiles=No Edge WebCache file found -ExtractEdge.process.errMsg.errWriteFile={0}\: Error while trying to write file\:{1} +ExtractEdge.process.errMsg.errWriteFile={0}\: Error while trying to write file\:{1} \ No newline at end of file diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 481b1ec3f4..211372d6d0 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -164,22 +164,22 @@ class Chrome extends Extract { for (HashMap result : tempList) { Collection bbattributes = new ArrayList(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - 
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("last_visit_time").toString()) / 1000000) - Long.valueOf("11644473600"))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("from_visit").toString() != null) ? result.get("from_visit").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Chrome.moduleName"))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), (NetworkUtils.extractDomain((result.get("url").toString() != null) ? 
result.get("url").toString() : "")))); //NON-NLS BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); @@ -309,21 +309,16 @@ class Chrome extends Extract { Collection bbattributes = new ArrayList<>(); //TODO Revisit usage of deprecated constructor as per TSK-583 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "Chrome.parentModuleName"), url)); + RecentActivityExtracterModuleFactory.getModuleName(), url)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE, - NbBundle.getMessage(this.getClass(), - "Chrome.parentModuleName"), name)); + RecentActivityExtracterModuleFactory.getModuleName(), name)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), - "Chrome.parentModuleName"), (date / 1000000) - Long.valueOf("11644473600"))); + RecentActivityExtracterModuleFactory.getModuleName(), (date / 1000000) - Long.valueOf("11644473600"))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Chrome.moduleName"))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "Chrome.parentModuleName"), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); bbart.addAttributes(bbattributes); // index the artifact for keyword search @@ -400,25 +395,25 @@ class Chrome extends Extract { for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("host_key").toString() != null) ? 
result.get("host_key").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("last_access_utc").toString()) / 1000000) - Long.valueOf("11644473600"))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("value").toString() != null) ? result.get("value").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Chrome.moduleName"))); String domain = result.get("host_key").toString(); //NON-NLS domain = domain.replaceFirst("^\\.+(?!$)", ""); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); if (bbart != null) { @@ -496,7 +491,7 @@ class Chrome extends Extract { for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), (result.get("full_path").toString()))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), 
(result.get("full_path").toString()))); //NON-NLS long pathID = Util.findID(dataSource, (result.get("full_path").toString())); //NON-NLS if (pathID != -1) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID, @@ -504,7 +499,7 @@ class Chrome extends Extract { "Chrome.parentModuleName"), pathID)); } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); //NON-NLS //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); Long time = (Long.valueOf(result.get("start_time").toString()) / 1000000) - Long.valueOf("11644473600"); //NON-NLS @@ -512,12 +507,12 @@ class Chrome extends Extract { //TODO Revisit usage of deprecated constructor as per TSK-583 //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", time)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), time)); + RecentActivityExtracterModuleFactory.getModuleName(), time)); String domain = NetworkUtils.extractDomain((result.get("url").toString() != null) ? 
result.get("url").toString() : ""); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Chrome.moduleName"))); BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes); @@ -590,24 +585,24 @@ class Chrome extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("origin_url").toString() != null) ? result.get("origin_url").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("date_created").toString()) / 1000000) - Long.valueOf("11644473600"))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), (NetworkUtils.extractDomain((result.get("origin_url").toString() != null) ? result.get("origin_url").toString() : "")))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("username_value").toString() != null) ? 
result.get("username_value").toString().replaceAll("'", "''") : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("signon_realm").toString() != null) ? result.get("signon_realm").toString() : ""))); //NON-NLS BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes); @@ -721,21 +716,21 @@ class Chrome extends Extract { ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("value").toString() != null) ? result.get("value").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNT, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), (Integer.valueOf(result.get("count").toString())))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), Long.valueOf(result.get("date_created").toString()))); //NON-NLS // get schema version specific attributes if (isSchemaV8X) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), Long.valueOf(result.get("date_last_used").toString()))); //NON-NLS } @@ -830,37 +825,37 @@ class Chrome extends Extract { full_name = String.join(" ", first_name, middle_name, last_name); } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME_PERSON, - 
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), full_name)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), email_Addr)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), phone_number)); //NON-NLS String locationAddress = String.join(", ", street_address, city, state, zipcode, country_code); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LOCATION, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), locationAddress)); //NON-NLS if (date_modified > 0) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), date_modified)); //NON-NLS } if (use_count > 0 ){ bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNT, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), use_count)); //NON-NLS } if (use_date > 0) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), use_date)); //NON-NLS } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index edf6f98ac4..a548874e71 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ 
-19,24 +19,36 @@ package org.sleuthkit.autopsy.recentactivity; import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; +import java.text.ParseException; +import java.text.SimpleDateFormat; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.List; +import java.util.Scanner; import java.util.logging.Level; import org.openide.modules.InstalledFileLocator; import org.openide.util.NbBundle; +import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.ExecUtil; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; @@ -52,6 +64,8 @@ public class ExtractEdge extends Extract{ private static File ESE_TOOL_FILE; private static String EDGE_WEBCACHE_NAME = "WebCacheV01.dat"; private static String EDGE = "Edge"; + + private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); ExtractEdge() throws NoCurrentCaseException{ moduleName = NbBundle.getMessage(Chrome.class, "ExtractEdge.moduleName"); @@ -131,7 +145,7 @@ public class ExtractEdge extends Extract{ resultsDir.mkdirs(); executeDumper(esedumper, datFile.getAbsolutePath(), 
"webcache", resultsDir.getAbsolutePath()); - this.getHistory(); // Not implemented yet + this.getHistory(indexFile, resultsDir); // Not implemented yet this.getCookie(); // Not implemented yet this.getDownload(); // Not implemented yet @@ -140,11 +154,122 @@ public class ExtractEdge extends Extract{ } } - /** - * Query for history databases and add artifacts - */ - private void getHistory() { + + @Messages({ + "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history" + }) + private void getHistory(AbstractFile origFile, File resultDir) { + File containerFiles[] = resultDir.listFiles((dir, name) -> name.toLowerCase().contains("container")); + if(containerFiles == null){ + this.addErrorMessage(Bundle.ExtractEdge_getHistory_containerFileNotFound()); + return; + } + + // The assumption is that the history is in one or more of the container files. + // search through all of them looking for a lines with the text "Visited:" + for(File file: containerFiles){ + Scanner fileScanner; + try { + fileScanner = new Scanner(new FileInputStream(file.toString())); + } catch (FileNotFoundException ex) { + logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS + continue; // Should we keep going or bail on the whole process? + } + + Collection bbartifacts = new ArrayList<>(); + + try{ + List headers = null; + while (fileScanner.hasNext()) { + String line = fileScanner.nextLine(); + if(headers == null){ // The header should be the first line + headers = Arrays.asList(line.toLowerCase().split(",")); + continue; + } + + if(line.contains("Visited")){ + BlackboardArtifact b = parseHistoryLine(origFile, headers, line); + if(b != null){ + bbartifacts.add(b); + this.indexArtifact(b); + } + }else{ + // I am making the assumption that if the line doesn't have + // "Visited" in it that its probably not the file we are looking for + // therefore we should move on to the next file. 
+ break; + } + } + } + finally{ + fileScanner.close(); + } + + if(!bbartifacts.isEmpty()){ + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, bbartifacts)); + } + } + + } + + @Messages({ + "ExtractEdge_programName=Microsoft Edge" + }) + private BlackboardArtifact parseHistoryLine(AbstractFile origFile, List headers, String line){ + BlackboardArtifact bbart = null; + String[] rowSplit = line.split(","); + + int index = headers.indexOf("url"); + String urlUserStr = rowSplit[index]; + + String[] str = urlUserStr.split("@"); + String user = str[0].replace("Visited: ", ""); + String url = str[1]; + + index = headers.indexOf("accessedtime"); + String accessTime = rowSplit[index].trim(); + Long ftime = null; + try{ + Long epochtime = dateFormatter.parse(accessTime).getTime(); + ftime = epochtime / 1000; + }catch(ParseException ex){ + logger.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); + } + + try{ + bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), url)); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, + RecentActivityExtracterModuleFactory.getModuleName(), ftime)); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, + RecentActivityExtracterModuleFactory.getModuleName(), "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, + RecentActivityExtracterModuleFactory.getModuleName(), "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), 
Bundle.ExtractEdge_programName())); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), (NetworkUtils.extractDomain(url)))); //NON-NLS + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), user)); + + bbart.addAttributes(bbattributes); + + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error writing Microsoft Edge web history artifact to the blackboard.", ex); //NON-NLS + } + + return bbart; } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 4ba48b8ceb..223ed69799 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -130,22 +130,17 @@ class ExtractIE extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), url)); + RecentActivityExtracterModuleFactory.getModuleName(), url)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), name)); + RecentActivityExtracterModuleFactory.getModuleName(), name)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), datetime)); + RecentActivityExtracterModuleFactory.getModuleName(), datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), 
"ExtractIE.moduleName.text"))); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); } BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes); @@ -245,25 +240,19 @@ class ExtractIE extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), url)); + RecentActivityExtracterModuleFactory.getModuleName(), url)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), datetime)); + RecentActivityExtracterModuleFactory.getModuleName(), datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), (name != null) ? name : "")); + RecentActivityExtracterModuleFactory.getModuleName(), (name != null) ? 
name : "")); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), value)); + RecentActivityExtracterModuleFactory.getModuleName(), value)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "ExtractIE.moduleName.text"))); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); } BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); if (bbart != null) { @@ -534,30 +523,24 @@ class ExtractIE extends Extract { BlackboardArtifact bbart = origFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), realurl)); + RecentActivityExtracterModuleFactory.getModuleName(), realurl)); //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), ftime)); + RecentActivityExtracterModuleFactory.getModuleName(), ftime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), "")); + RecentActivityExtracterModuleFactory.getModuleName(), "")); // @@@ NOte that other browser modules are adding TITLE in hre for the title 
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "ExtractIE.moduleName.text"))); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, - NbBundle.getMessage(this.getClass(), - "ExtractIE.parentModuleName.noSpace"), user)); + RecentActivityExtracterModuleFactory.getModuleName(), user)); bbart.addAttributes(bbattributes); // index the artifact for keyword search diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 1d20f27cd7..68885bcc52 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -415,7 +415,7 @@ class ExtractRegistry extends Extract { Element artroot = (Element) artroots.item(0); NodeList myartlist = artroot.getChildNodes(); - String parentModuleName = NbBundle.getMessage(this.getClass(), "ExtractRegistry.parentModuleName.noSpace"); + String parentModuleName = RecentActivityExtracterModuleFactory.getModuleName(); String winver = ""; // If all artifact nodes should really go under one Blackboard artifact, need to process it differently @@ -834,7 +834,7 @@ class ExtractRegistry extends Extract { */ private boolean parseSamPluginOutput(String regFilePath, AbstractFile regAbstractFile) { File regfile = new File(regFilePath); - String parentModuleName = NbBundle.getMessage(this.getClass(), "ExtractRegistry.parentModuleName.noSpace"); + String 
parentModuleName = RecentActivityExtracterModuleFactory.getModuleName(); SimpleDateFormat regRipperTimeFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy 'Z'"); regRipperTimeFormat.setTimeZone(getTimeZone("GMT")); try (BufferedReader bufferedReader = new BufferedReader(new FileReader(regfile))) { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index dd4e4d4d4d..043e0bf0ff 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -164,31 +164,25 @@ class Firefox extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((url != null) ? url : ""))); //NON-NLS //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("visit_date").toString())))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("ref").toString() != null) ? 
result.get("ref").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Firefox.moduleName"))); String domain = extractDomain(url); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), domain)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); @@ -263,29 +257,23 @@ class Firefox extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((url != null) ? url : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("title").toString() != null) ? 
result.get("title").toString() : ""))); //NON-NLS if (Long.valueOf(result.get("dateAdded").toString()) > 0) { //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("dateAdded").toString())))); //NON-NLS } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Firefox.moduleName"))); String domain = extractDomain(url); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), - domain)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes); @@ -368,38 +356,31 @@ class Firefox extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((host != null) ? host : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("lastAccessed").toString())))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("name").toString() != null) ? 
result.get("name").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("value").toString() != null) ? result.get("value").toString() : ""))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Firefox.moduleName"))); if (checkColumn == true) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), - (Long.valueOf(result.get("creationTime").toString())))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (Long.valueOf(result.get("creationTime").toString())))); //NON-NLS } String domain = extractDomain(host); if (domain != null && domain.isEmpty() == false) { domain = domain.replaceFirst("^\\.+(?!$)", ""); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); } BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); @@ -487,13 +468,11 @@ class Firefox extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), source)); //NON-NLS //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? 
EscapeUtil.decodeURL(result.get("source").toString()) : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("startTime").toString())))); //NON-NLS String target = result.get("target").toString(); //NON-NLS @@ -502,14 +481,12 @@ class Firefox extends Extract { try { String decodedTarget = URLDecoder.decode(target.replaceAll("file:///", ""), "UTF-8"); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), decodedTarget)); long pathID = Util.findID(dataSource, decodedTarget); if (pathID != -1) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), pathID)); } } catch (UnsupportedEncodingException ex) { @@ -519,14 +496,12 @@ class Firefox extends Extract { } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Firefox.moduleName"))); String domain = extractDomain(source); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } @@ -614,8 +589,7 @@ class Firefox extends Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + 
RecentActivityExtracterModuleFactory.getModuleName(), url)); //NON-NLS //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? EscapeUtil.decodeURL(result.get("source").toString()) : ""))); //TODO Revisit usage of deprecated constructor as per TSK-583 @@ -626,14 +600,12 @@ class Firefox extends Extract { try { String decodedTarget = URLDecoder.decode(target.replaceAll("file:///", ""), "UTF-8"); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), decodedTarget)); long pathID = Util.findID(dataSource, decodedTarget); if (pathID != -1) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), pathID)); } } catch (UnsupportedEncodingException ex) { @@ -642,19 +614,15 @@ class Firefox extends Extract { } } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), Long.valueOf(result.get("lastModified").toString()))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), + RecentActivityExtracterModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "Firefox.moduleName"))); String domain = extractDomain(url); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - NbBundle.getMessage(this.getClass(), - "Firefox.parentModuleName.noSpace"), - domain)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } 
BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); @@ -754,26 +722,26 @@ class Firefox extends Extract { } bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), fieldName)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("value").toString() != null) ? result.get("value").toString() : ""))); //NON-NLS // Newer versions of firefox have additional columns if (isFirefoxV64) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), - (Long.valueOf(result.get("firstUsed").toString()) / 1000000))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (Long.valueOf(result.get("firstUsed").toString()) / 1000000))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), - (Long.valueOf(result.get("lastUsed").toString()) / 1000000))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (Long.valueOf(result.get("lastUsed").toString()) / 1000000))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNT, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), - (Integer.valueOf(result.get("timesUsed").toString())))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (Integer.valueOf(result.get("timesUsed").toString())))); //NON-NLS } // Add artifact @@ -919,32 +887,32 @@ class Firefox extends Extract { try { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME_PERSON, - 
NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), name)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), email)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), phoneNumber)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LOCATION, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), + RecentActivityExtracterModuleFactory.getModuleName(), mailingAddress)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), - datetimeCreated)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + datetimeCreated)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), - datetimeLastUsed)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + datetimeLastUsed)); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNT, - NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName"), - timesUsed)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + timesUsed)); //NON-NLS BlackboardArtifact bbart = profileFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS); From 49680e72804e4d2e9050802f4be845aa63047a01 Mon Sep 17 00:00:00 2001 From: Raman Date: Thu, 14 Feb 2019 15:46:18 -0500 Subject: [PATCH 09/80] 1184: Chrome cache Allow for finding multiple chrome caches in a data source. 
--- .../autopsy/recentactivity/Chrome.java | 2 +- .../recentactivity/ChromeCacheExtractor.java | 331 ++++++++++++------ 2 files changed, 221 insertions(+), 112 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 57ee0fd62b..c0f0cf9818 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -99,7 +99,7 @@ class Chrome extends Extract { this.getDownload(); ChromeCacheExtractor chromeCacheExtractor = new ChromeCacheExtractor(dataSource, context); - chromeCacheExtractor.getCache(); + chromeCacheExtractor.getCaches(); } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index f7aad7b518..0df15e48c2 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -30,6 +30,7 @@ import java.nio.channels.FileChannel; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -73,6 +74,7 @@ import org.sleuthkit.datamodel.TskException; */ final class ChromeCacheExtractor { + private final static String DEFAULT_CACHE_STR = "default/cache"; private final static String BROTLI_MIMETYPE ="application/x-brotli"; private final static long UINT32_MASK = 0xFFFFFFFFl; @@ -95,7 +97,7 @@ final class ChromeCacheExtractor { private FileManager fileManager; - private final Map filesTable = new HashMap<>(); + private Map filesTable = new HashMap<>(); /** * Encapsulates abstract file for a cache file as well as a temp file copy @@ -136,7 +138,7 @@ final class ChromeCacheExtractor { * * @throws 
IngestModuleException */ - void init() throws IngestModuleException { + void moduleInit() throws IngestModuleException { try { currentCase = Case.getCurrentCaseThrows(); @@ -155,6 +157,30 @@ final class ChromeCacheExtractor { } } + /** + * Initializes the module to extract cache from a specific folder. + * + * @param cachePath - path where cache files are found + * + * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException + */ + void subInit(String cachePath) throws IngestModuleException { + + filesTable.clear(); + + String cacheAbsOutputFolderName = this.getAbsOutputFolderName() + cachePath; + File outDir = new File(cacheAbsOutputFolderName); + if (outDir.exists() == false) { + outDir.mkdirs(); + } + + String cacheTempPath = RAImageIngestModule.getRATempPath(currentCase, moduleName) + cachePath; + File tempDir = new File(cacheTempPath); + if (tempDir.exists() == false) { + tempDir.mkdirs(); + } + } + /** * Cleans up after the module is done. * @@ -198,143 +224,202 @@ final class ChromeCacheExtractor { } /** - * Extracts the data from Chrome cache + * Extracts the data from Chrome caches + * + * A data source may have multiple Chrome user profiles and caches. + * */ - void getCache() { + void getCaches() { try { - init(); + moduleInit(); } catch (IngestModuleException ex) { String msg = "Failed to initialize ChromeCacheExtractor."; logger.log(Level.SEVERE, msg, ex); return; } + // Find all possible caches + List indexFiles; + try { + indexFiles = findCacheFiles("index"); + + // Get each of the caches + for (AbstractFile indexFile: indexFiles) { + getCache(indexFile); + } + + } catch (TskCoreException ex) { + String msg = "Failed to find cache index files"; + logger.log(Level.SEVERE, msg, ex); + } + } + + /** + * Extracts the cache for the specified cache index file. 
+ * + * @param cacheIndexFile + */ + void getCache(AbstractFile indexAbstractFile) { + + String cachePath = indexAbstractFile.getParentPath(); Optional indexFile; try { - // find the index file - indexFile = findAndCopyCacheFile("index"); + subInit(cachePath); + + indexFile = this.getCacheFileCopy(indexAbstractFile.getName(), cachePath); if (!indexFile.isPresent()) { + String msg = String.format("Failed to find copy cache index file %s", indexAbstractFile.getUniquePath()); + logger.log(Level.SEVERE, msg); return; } - + for (int i = 0; i < 4; i ++) { - Optional dataFile = findAndCopyCacheFile(String.format("data_%1d",i)); + Optional dataFile = findAndCopyCacheFile(String.format("data_%1d",i), cachePath ); if (!dataFile.isPresent()) { return; } } - + } catch (TskCoreException | IngestModuleException ex) { - String msg = "Failed to find cache files"; + String msg = "Failed to find cache files in path " + cachePath; logger.log(Level.SEVERE, msg, ex); return; } - logger.log(Level.INFO, "{0}- Now reading Cache index file", new Object[]{moduleName}); //NON-NLS - + logger.log(Level.INFO, "{0}- Now reading Cache index file from path {1}", new Object[]{moduleName, cachePath }); //NON-NLS + List derivedFiles = new ArrayList<>(); - + ByteBuffer indexFileROBuffer = indexFile.get().getByteBuffer(); IndexFileHeader indexHdr = new IndexFileHeader(indexFileROBuffer); - + // seek past the header indexFileROBuffer.position(INDEXFILE_HDR_SIZE); // Process each address in the table for (int i = 0; i < indexHdr.getTableLen(); i++) { - CacheAddress addr = new CacheAddress(indexFileROBuffer.getInt() & UINT32_MASK); + CacheAddress addr = new CacheAddress(indexFileROBuffer.getInt() & UINT32_MASK, cachePath); if (addr.isInitialized()) { - String fileName = addr.getFilename(); try { - Optional cacheFileCopy = this.getCacheFileCopy(fileName); - if (!cacheFileCopy.isPresent()) { - logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr)); //NON-NLS - } + List 
addedFiles = this.getCacheEntry(addr); - // Get the cache entry and its data segments - CacheEntry cacheEntry = new CacheEntry(addr, cacheFileCopy.get() ); - List dataEntries = cacheEntry.getData(); - - for (int j = 0; j < dataEntries.size(); j++) { - CacheData data = dataEntries.get(j); - String dataFilename = data.getAddress().getFilename(); - Optional dataFile = this.findCacheFile(dataFilename); - - boolean isBrotliCompressed = false; - if (data.getType() != CacheDataTypeEnum.HTTP_HEADER && cacheEntry.isBrotliCompressed() ) { - isBrotliCompressed = true; - } - - Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - moduleName, - ((cacheEntry.getKey() != null) ? cacheEntry.getKey() : ""))); //NON-NLS - - if (dataFile.isPresent()) { - if (data.isInExternalFile() ) { - try { - BlackboardArtifact bbart = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); - if (bbart != null) { - bbart.addAttributes(bbattributes); - } - if (isBrotliCompressed) { - dataFile.get().setMIMEType(BROTLI_MIMETYPE); - dataFile.get().save(); - } - } catch (TskException ex) { - logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS - } - } else { - - // Data segments in "data_x" files are saved in individual files and added as derived files - String filename = data.save(); - String relPathname = getRelOutputFolderName() + File.separator + filename; - DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, - data.getDataLength(), - cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), // TBD - true, - dataFile.get(), - "", - moduleName, - VERSION_NUMBER, - "", - TskData.EncodingType.NONE); - - derivedFiles.add(derivedFile); - try { - BlackboardArtifact bbart = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); - if (bbart != null) { - bbart.addAttributes(bbattributes); - } - if 
(isBrotliCompressed) { - derivedFile.setMIMEType(BROTLI_MIMETYPE); - derivedFile.save(); - } - } catch (TskException ex) { - logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS - } - } - } - } - - } catch (TskCoreException | IngestModuleException ex) { + derivedFiles.addAll(addedFiles); + + } + catch (TskCoreException | IngestModuleException ex) { logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr)); //NON-NLS } } } - + derivedFiles.forEach((derived) -> { services.fireModuleContentEvent(new ModuleContentEvent(derived)); }); - + context.addFilesToJob(derivedFiles); services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT)); - + cleanup(); } + /** + * Gets the cache entry at the specified address. + * + * Extracts the files if needed and adds as derived files, creates artifacts + * + * @param cacheEntryAddress cache entry address + * + * @return Optional derived file, is a derived file is added for the given entry + */ + List getCacheEntry(CacheAddress cacheEntryAddress) throws TskCoreException, IngestModuleException { + + List derivedFiles = new ArrayList<>(); + + String fileName = cacheEntryAddress.getFilename(); + String cachePath = cacheEntryAddress.getCachePath(); + + + Optional cacheFileCopy = this.getCacheFileCopy(fileName, cachePath); + if (!cacheFileCopy.isPresent()) { + logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", cacheEntryAddress)); //NON-NLS + } + + // Get the cache entry and its data segments + CacheEntry cacheEntry = new CacheEntry(cacheEntryAddress, cacheFileCopy.get() ); + List dataEntries = cacheEntry.getData(); + + for (int j = 0; j < dataEntries.size(); j++) { + CacheData data = dataEntries.get(j); + String dataFilename = data.getAddress().getFilename(); + Optional dataFile = this.findCacheFile(dataFilename, cachePath); + + boolean isBrotliCompressed = false; + if (data.getType() != 
CacheDataTypeEnum.HTTP_HEADER && cacheEntry.isBrotliCompressed() ) { + isBrotliCompressed = true; + } + + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + moduleName, + ((cacheEntry.getKey() != null) ? cacheEntry.getKey() : ""))); //NON-NLS + + if (dataFile.isPresent()) { + if (data.isInExternalFile() ) { + try { + BlackboardArtifact bbart = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); + if (bbart != null) { + bbart.addAttributes(bbattributes); + } + if (isBrotliCompressed) { + dataFile.get().setMIMEType(BROTLI_MIMETYPE); + dataFile.get().save(); + } + } catch (TskException ex) { + logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS + } + } else { + + // Data segments in "data_x" files are saved in individual files and added as derived files + String filename = data.save(); + + String relPathname = getRelOutputFolderName() + data.getAddress().getCachePath() + filename; + + DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, + data.getDataLength(), + cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), // TBD + true, + dataFile.get(), + "", + moduleName, + VERSION_NUMBER, + "", + TskData.EncodingType.NONE); + + + try { + BlackboardArtifact bbart = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE_ARTIFACT); + if (bbart != null) { + bbart.addAttributes(bbattributes); + } + if (isBrotliCompressed) { + derivedFile.setMIMEType(BROTLI_MIMETYPE); + derivedFile.save(); + } + + + derivedFiles.add(derivedFile); + } catch (TskException ex) { + logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS + } + } + } + } + + return derivedFiles; + } + /** * Finds abstract file for cache file with a specified name * @@ -342,12 +427,12 @@ final class ChromeCacheExtractor { * @return Opt * @throws TskCoreException */ - Optional 
findCacheFile(String cacheFileName) throws TskCoreException { + Optional findCacheFile(String cacheFileName, String cachePath) throws TskCoreException { - List cacheFiles = fileManager.findFiles(dataSource, cacheFileName, "default/cache"); //NON-NLS + List cacheFiles = fileManager.findFiles(dataSource, cacheFileName, cachePath); //NON-NLS if (!cacheFiles.isEmpty()) { for (AbstractFile abstractFile: cacheFiles ) { - if (abstractFile.getUniquePath().trim().endsWith("default/cache")) { + if (abstractFile.getUniquePath().trim().endsWith(DEFAULT_CACHE_STR)) { return Optional.of(abstractFile); } } @@ -357,6 +442,21 @@ final class ChromeCacheExtractor { return Optional.empty(); } + /** + * Finds abstract file(s) for a cache file with the specified name. + * + * @param cacheFileName + * @return list of abstract files matching the specified file name + * @throws TskCoreException + */ + List findCacheFiles(String cacheFileName) throws TskCoreException { + + List cacheFiles = fileManager.findFiles(dataSource, cacheFileName, DEFAULT_CACHE_STR); //NON-NLS + + return cacheFiles; + } + + /** * Returns CacheFileCopy for the specified file from the file table. * Find the file and creates a copy if it isn't already in the table. 
@@ -365,32 +465,33 @@ final class ChromeCacheExtractor { * @return CacheFileCopy * @throws TskCoreException */ - Optional getCacheFileCopy(String cacheFileName) throws TskCoreException, IngestModuleException { + Optional getCacheFileCopy(String cacheFileName, String cachePath) throws TskCoreException, IngestModuleException { // Check if the file is already in the table - if (filesTable.containsKey(cacheFileName)) { - return Optional.of(filesTable.get(cacheFileName)); + String fileTableKey = cachePath + cacheFileName; + if (filesTable.containsKey(fileTableKey)) { + return Optional.of(filesTable.get(fileTableKey)); } - return findAndCopyCacheFile(cacheFileName); + return findAndCopyCacheFile(cacheFileName, cachePath); } - + /** - * Finds the specified cache file and makes a temporary copy. + * Finds the specified cache file under the specified path, and makes a temporary copy. * * @param cacheFileName * @return Cache file copy * @throws TskCoreException */ - Optional findAndCopyCacheFile(String cacheFileName) throws TskCoreException, IngestModuleException { + Optional findAndCopyCacheFile(String cacheFileName, String cachePath) throws TskCoreException, IngestModuleException { - Optional cacheFileOptional = findCacheFile(cacheFileName); + Optional cacheFileOptional = findCacheFile(cacheFileName, cachePath); if (!cacheFileOptional.isPresent()) { return Optional.empty(); } AbstractFile cacheFile = cacheFileOptional.get(); - String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + File.separator + cacheFile.getName(); //NON-NLS + String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + cachePath + cacheFile.getName(); //NON-NLS try { File newFile = new File(tempFilePathname); ContentUtils.writeToFile(cacheFile, newFile, context::dataSourceIngestIsCancelled); @@ -404,7 +505,7 @@ final class ChromeCacheExtractor { CacheFileCopy cacheFileCopy = new CacheFileCopy(cacheFile, randomAccessFile, cacheFileROBuf ); if 
(!cacheFileName.startsWith("f_")) { - filesTable.put(cacheFileName, cacheFileCopy); + filesTable.put(cachePath + cacheFileName, cacheFileCopy); } return Optional.of(cacheFileCopy); @@ -543,10 +644,14 @@ final class ChromeCacheExtractor { private final String fileName; private final int fileNumber; + private final String cachePath; - CacheAddress(long uint32) { + + CacheAddress(long uint32, String cachePath) { uint32CacheAddr = uint32; + this.cachePath = cachePath; + int fileTypeEnc = (int)(uint32CacheAddr & FILE_TYPE_MASK) >> FILE_TYPE_OFFSET; fileType = CacheFileTypeEnum.values()[fileTypeEnc]; @@ -583,6 +688,10 @@ final class ChromeCacheExtractor { return fileName; } + String getCachePath() { + return cachePath; + } + boolean isInExternalFile() { return (fileType == CacheFileTypeEnum.EXTERNAL); } @@ -716,7 +825,7 @@ final class ChromeCacheExtractor { return; } - cacheFileCopy = getCacheFileCopy(address.getFilename()).get(); + cacheFileCopy = getCacheFileCopy(address.getFilename(), address.getCachePath()).get(); if (!address.isInExternalFile() ) { this.data = new byte [length]; @@ -825,7 +934,7 @@ final class ChromeCacheExtractor { fileName = String.format("%s__%08x", address.getFilename(), address.getUint32CacheAddr()); } - String filePathName = getAbsOutputFolderName() + File.separator + fileName; + String filePathName = getAbsOutputFolderName() + address.getCachePath() + fileName; save(filePathName); return fileName; @@ -963,10 +1072,10 @@ final class ChromeCacheExtractor { hash = fileROBuf.getInt() & UINT32_MASK; long uint32 = fileROBuf.getInt() & UINT32_MASK; - nextAddress = (uint32 != 0) ? new CacheAddress(uint32) : null; + nextAddress = (uint32 != 0) ? new CacheAddress(uint32, selfAddress.getCachePath()) : null; uint32 = fileROBuf.getInt() & UINT32_MASK; - rankingsNodeAddress = (uint32 != 0) ? new CacheAddress(uint32) : null; + rankingsNodeAddress = (uint32 != 0) ? 
new CacheAddress(uint32, selfAddress.getCachePath()) : null; reuseCount = fileROBuf.getInt(); refetchCount = fileROBuf.getInt(); @@ -977,7 +1086,7 @@ final class ChromeCacheExtractor { keyLen = fileROBuf.getInt(); uint32 = fileROBuf.getInt() & UINT32_MASK; - longKeyAddresses = (uint32 != 0) ? new CacheAddress(uint32) : null; + longKeyAddresses = (uint32 != 0) ? new CacheAddress(uint32, selfAddress.getCachePath()) : null; dataSizes= new int[4]; for (int i = 0; i < 4; i++) { @@ -985,7 +1094,7 @@ final class ChromeCacheExtractor { } dataAddresses = new CacheAddress[4]; for (int i = 0; i < 4; i++) { - dataAddresses[i] = new CacheAddress(fileROBuf.getInt() & UINT32_MASK); + dataAddresses[i] = new CacheAddress(fileROBuf.getInt() & UINT32_MASK, selfAddress.getCachePath()); } flags = fileROBuf.getInt() & UINT32_MASK; From fa96ca9bf1cb209b42f43be813e0d50f6c39d5d8 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\kelly" Date: Fri, 15 Feb 2019 10:46:39 -0500 Subject: [PATCH 10/80] Updated\reorganized ExtractEdge based on review comments 1190-basic-edge-module --- .../autopsy/recentactivity/Bundle.properties | 5 - .../autopsy/recentactivity/ExtractEdge.java | 257 +++++++++++------- 2 files changed, 158 insertions(+), 104 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties index d6425a4488..1750a4287c 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties @@ -105,8 +105,3 @@ SearchEngineURLQueryAnalyzer.toString=Name\: {0}\n\ SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity UsbDeviceIdMapper.parseAndLookup.text=Product\: {0} -ExtractEdge.moduleName=Microsoft Edge -ExtractEdge.process.errMsg.unableFindESEViewer=Unable to find ESEDatabaseViewer 
-ExtractEdge.process.errMsg.errGettingWebCacheFiles=Error retrieving Edge file -ExtractEdge.process.errMsg.noWebCachFiles=No Edge WebCache file found -ExtractEdge.process.errMsg.errWriteFile={0}\: Error while trying to write file\:{1} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index edf6f98ac4..6dcddf9ed0 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.logging.Level; import org.openide.modules.InstalledFileLocator; import org.openide.util.NbBundle; +import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.ExecUtil; @@ -40,158 +41,216 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; -public class ExtractEdge extends Extract{ - +/** + * Extract the bookmarks, cookies, downloads and history from the Microsoft Edge + * files + * + * @author kelly + */ +final class ExtractEdge extends Extract { + private static final Logger logger = Logger.getLogger(ExtractIE.class.getName()); private final IngestServices services = IngestServices.getInstance(); private final String moduleTempResultsDir; private Content dataSource; private IngestJobContext context; - - private static String ESE_TOOL_NAME = "ESEDatabaseView.exe"; - private static File ESE_TOOL_FILE; - private static String EDGE_WEBCACHE_NAME = "WebCacheV01.dat"; - private static String EDGE = "Edge"; - ExtractEdge() throws NoCurrentCaseException{ - moduleName = NbBundle.getMessage(Chrome.class, "ExtractEdge.moduleName"); - moduleTempResultsDir = RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE) + 
File.separator + "results"; + private static final String ESE_TOOL_NAME = "ESEDatabaseView.exe"; + private static final String EDGE_WEBCACHE_NAME = "WebCacheV01.dat"; + private static final String EDGE_WEBCACHE_PREFIX = "WebCacheV01"; + private static final String EDGE = "Edge"; + private static final String ESE_TOOL_FOLDER = "ESEDatabaseView"; + private static final String EDGE_SPARTAN_NAME = "Spartan.edb"; + + ExtractEdge() throws NoCurrentCaseException { + moduleTempResultsDir = RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE) + + File.separator + "results"; //NON-NLS } - + + @Messages({ + "ExtractEdge_Module_Name=Microsoft Edge" + }) + @Override + protected String getName() { + return Bundle.ExtractEdge_Module_Name(); + } + + @Messages({ + "ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer", + "ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file", + "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCache file" + }) @Override void process(Content dataSource, IngestJobContext context) { this.dataSource = dataSource; this.context = context; dataFound = false; - - this.processWebCache(); - - // Bookmarks come from spartan.edb different file + + List webCacheFiles; + List spartanFiles; + try { + webCacheFiles = fetchWebCacheFiles(); + spartanFiles = fetchSpartanFiles(); // For later use with bookmarks + } catch (TskCoreException ex) { + this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_errGettingWebCacheFiles()); + logger.log(Level.WARNING, "Error fetching 'WebCacheV01.dat' files for Microsoft Edge", ex); //NON-NLS + return; + } + + // No edge files found + if (webCacheFiles == null && spartanFiles == null) { + return; + } + + dataFound = true; + + if (!PlatformUtil.isWindowsOS()) { + logger.log(Level.INFO, "Microsoft Edge files found, unable to parse on Non-Windows system"); //NON-NLS + return; + } + + final String esedumper = 
getPathForESEDumper(); + if (esedumper == null) { + this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_unableFindESEViewer()); + logger.log(Level.SEVERE, "Error finding ESEDatabaseViewer program"); //NON-NLS + return; //If we cannot find the ESEDatabaseView we cannot proceed + } + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + try { + this.processWebCache(esedumper, webCacheFiles); + } catch (IOException ex) { + this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); + logger.log(Level.SEVERE, "Error returned from processWebCach", ex); // NON-NLS + } + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + // Bookmarks come from spartan.edb different file this.getBookmark(); // Not implemented yet } - - void processWebCache(){ - Path path = Paths.get("ESEDatabaseView", ESE_TOOL_NAME); - ESE_TOOL_FILE = InstalledFileLocator.getDefault().locate(path.toString(), ExtractEdge.class.getPackage().getName(), false); //NON-NLS - if (ESE_TOOL_FILE == null) { - this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractEdge.process.errMsg.unableFindESEViewer", this.getName())); - logger.log(Level.SEVERE, "Error finding ESEDatabaseViewer program "); //NON-NLS - } - - final String esedumper = ESE_TOOL_FILE.getAbsolutePath(); - // get WebCacheV01.dat files - org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List webCachFiles; - try { - webCachFiles = fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME); //NON-NLS - } catch (TskCoreException ex) { - this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractEdge.process.errMsg.errGettingWebCacheFiles", - this.getName())); - logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); //NON-NLS - return; - } + void processWebCache(String eseDumperPath, List webCachFiles) throws IOException { - if (webCachFiles.isEmpty()) { - String msg = 
NbBundle.getMessage(this.getClass(), "ExtractEdge.process.errMsg.noWebCachFiles"); - logger.log(Level.INFO, msg); - return; - } + for (AbstractFile webCacheFile : webCachFiles) { - dataFound = true; - - if(!PlatformUtil.isWindowsOS()){ - logger.log(Level.WARNING, "Edge data found, unable to parse on non-windows system."); //NON-NLS - return; - } - - String temps; - String indexFileName; - for(AbstractFile indexFile : webCachFiles) { - //Run the dumper - indexFileName = "WebCacheV01" + Integer.toString((int) indexFile.getId()) + ".dat"; - temps = RAImageIngestModule.getRATempPath(currentCase, EDGE) + File.separator + indexFileName; //NON-NLS - File datFile = new File(temps); - if (context.dataSourceIngestIsCancelled()) { - break; - } + String tempWebCacheFileName = EDGE_WEBCACHE_PREFIX + + Integer.toString((int) webCacheFile.getId()) + ".dat"; //NON-NLS + File tempWebCacheFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE) + + File.separator + tempWebCacheFileName); + try { - ContentUtils.writeToFile(indexFile, datFile, context::dataSourceIngestIsCancelled); - } catch (IOException e) { - logger.log(Level.WARNING, "Error while trying to write index.dat file " + datFile.getAbsolutePath(), e); //NON-NLS - this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractEdge.process.errMsg.errWriteFile", this.getName(), - datFile.getAbsolutePath())); - continue; + ContentUtils.writeToFile(webCacheFile, tempWebCacheFile, + context::dataSourceIngestIsCancelled); + } catch (IOException ex) { + throw new IOException("Error writingToFile: " + webCacheFile, ex); //NON-NLS } - - File resultsDir = new File(moduleTempResultsDir + Integer.toString((int) indexFile.getId())); + + File resultsDir = new File(moduleTempResultsDir + Integer.toString((int) webCacheFile.getId())); resultsDir.mkdirs(); - executeDumper(esedumper, datFile.getAbsolutePath(), "webcache", resultsDir.getAbsolutePath()); - - this.getHistory(); // Not implemented yet - this.getCookie(); // 
Not implemented yet - this.getDownload(); // Not implemented yet - - datFile.delete(); - resultsDir.delete(); - } + try { + executeDumper(eseDumperPath, tempWebCacheFile.getAbsolutePath(), + EDGE_WEBCACHE_PREFIX, resultsDir.getAbsolutePath()); + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + this.getHistory(); // Not implemented yet + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + this.getCookie(); // Not implemented yet + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + this.getDownload(); // Not implemented yet + } finally { + tempWebCacheFile.delete(); + resultsDir.delete(); + } + } } - + /** * Query for history databases and add artifacts */ private void getHistory() { - + } - + /** * Search for bookmark files and make artifacts. */ private void getBookmark() { - + } - + /** * Queries for cookie files and adds artifacts */ private void getCookie() { - + } - + /** * Queries for download files and adds artifacts */ private void getDownload() { - + } - - private boolean executeDumper(String dumperPath, String inputFilePath, String inputFilePrefix, String outputDir){ - final String outputFileFullPath = outputDir + File.separator + inputFilePrefix + ".txt"; + + private String getPathForESEDumper() { + Path path = Paths.get(ESE_TOOL_FOLDER, ESE_TOOL_NAME); + File eseToolFile = InstalledFileLocator.getDefault().locate(path.toString(), + ExtractEdge.class.getPackage().getName(), false); + if (eseToolFile != null) { + return eseToolFile.getAbsolutePath(); + } + + return null; + } + + private List fetchWebCacheFiles() throws TskCoreException { + org.sleuthkit.autopsy.casemodule.services.FileManager fileManager + = currentCase.getServices().getFileManager(); + return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME); + } + + private List fetchSpartanFiles() throws TskCoreException { + org.sleuthkit.autopsy.casemodule.services.FileManager fileManager + = currentCase.getServices().getFileManager(); + return 
fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME); + } + + private void executeDumper(String dumperPath, String inputFilePath, + String inputFilePrefix, String outputDir) throws IOException { + final String outputFileFullPath = outputDir + File.separator + inputFilePrefix + ".txt"; //NON-NLS final String errFileFullPath = outputDir + File.separator + inputFilePrefix + ".err"; //NON-NLS logger.log(Level.INFO, "Writing ESEDatabaseViewer results to: {0}", outputDir); //NON-NLS - + List commandLine = new ArrayList<>(); commandLine.add(dumperPath); commandLine.add("/table"); commandLine.add(inputFilePath); - commandLine.add("*"); + commandLine.add("*"); commandLine.add("/scomma"); commandLine.add(outputDir + "\\" + inputFilePrefix + "_*.csv"); - + ProcessBuilder processBuilder = new ProcessBuilder(commandLine); processBuilder.redirectOutput(new File(outputFileFullPath)); processBuilder.redirectError(new File(errFileFullPath)); - try{ - ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); - }catch(IOException ex){ - logger.log(Level.SEVERE, "Unable to execute ESEDatabaseView to process Edge file." 
, ex); //NON-NLS - return false; - } - - return true; + ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); } } From 1529a01cf0290147dac8aba45bca89bebb3aae02 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Fri, 15 Feb 2019 14:01:19 -0500 Subject: [PATCH 11/80] Cleaned up edge history code ready for pr 1191-edge-history --- .../autopsy/recentactivity/ExtractEdge.java | 187 +++++++++--------- 1 file changed, 97 insertions(+), 90 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index c51270f0f9..66099f4e10 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -33,7 +33,6 @@ import java.util.List; import java.util.Scanner; import java.util.logging.Level; import org.openide.modules.InstalledFileLocator; -import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -55,8 +54,6 @@ import org.sleuthkit.datamodel.TskCoreException; /** * Extract the bookmarks, cookies, downloads and history from the Microsoft Edge * files - * - * @author kelly */ final class ExtractEdge extends Extract { @@ -72,8 +69,11 @@ final class ExtractEdge extends Extract { private static final String EDGE = "Edge"; private static final String ESE_TOOL_FOLDER = "ESEDatabaseView"; private static final String EDGE_SPARTAN_NAME = "Spartan.edb"; - - private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); + private static final String EDGE_HEAD_URL = "url"; + private static final String EDGE_HEAD_ACCESSTIME = "accessedtime"; + private static final String EDGE_KEYWORD_VISIT = "Visited:"; + + private static final SimpleDateFormat DATE_FORMATTER = new 
SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); ExtractEdge() throws NoCurrentCaseException { moduleTempResultsDir = RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE) @@ -138,6 +138,8 @@ final class ExtractEdge extends Extract { } catch (IOException ex) { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); logger.log(Level.SEVERE, "Error returned from processWebCach", ex); // NON-NLS + } catch (TskCoreException tcex) { + } if (context.dataSourceIngestIsCancelled()) { @@ -148,7 +150,7 @@ final class ExtractEdge extends Extract { this.getBookmark(); // Not implemented yet } - void processWebCache(String eseDumperPath, List webCachFiles) throws IOException { + void processWebCache(String eseDumperPath, List webCachFiles) throws IOException, TskCoreException { for (AbstractFile webCacheFile : webCachFiles) { @@ -194,123 +196,65 @@ final class ExtractEdge extends Extract { } } } - - + + /** + * getHistory searches the files with "container" in the file name for lines + * with the text "Visited" in them. Note that not all of the container + * files, if fact most of them do not, have the browser history in them. + */ @Messages({ "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history" }) - private void getHistory(AbstractFile origFile, File resultDir) { + private void getHistory(AbstractFile origFile, File resultDir) throws TskCoreException { File containerFiles[] = resultDir.listFiles((dir, name) -> name.toLowerCase().contains("container")); - - if(containerFiles == null){ + + if (containerFiles == null) { this.addErrorMessage(Bundle.ExtractEdge_getHistory_containerFileNotFound()); return; } - - // The assumption is that the history is in one or more of the container files. 
- // search through all of them looking for a lines with the text "Visited:" - for(File file: containerFiles){ + + for (File file : containerFiles) { Scanner fileScanner; try { fileScanner = new Scanner(new FileInputStream(file.toString())); } catch (FileNotFoundException ex) { logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS - continue; // Should we keep going or bail on the whole process? + continue; // If we couldn't open this file, continue to the next file } - + Collection bbartifacts = new ArrayList<>(); - - try{ + + try { List headers = null; while (fileScanner.hasNext()) { String line = fileScanner.nextLine(); - if(headers == null){ // The header should be the first line + if (headers == null) { headers = Arrays.asList(line.toLowerCase().split(",")); continue; } - if(line.contains("Visited")){ + if (line.contains(EDGE_KEYWORD_VISIT)) { BlackboardArtifact b = parseHistoryLine(origFile, headers, line); - if(b != null){ + if (b != null) { bbartifacts.add(b); this.indexArtifact(b); } - }else{ - // I am making the assumption that if the line doesn't have - // "Visited" in it that its probably not the file we are looking for - // therefore we should move on to the next file. 
+ } else { + // If Visited is not in line than this is probably + // not the container file we're looking for, move on break; } } - } - finally{ + } finally { fileScanner.close(); } - - if(!bbartifacts.isEmpty()){ + + if (!bbartifacts.isEmpty()) { services.fireModuleDataEvent(new ModuleDataEvent( - RecentActivityExtracterModuleFactory.getModuleName(), - BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, bbartifacts)); + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, bbartifacts)); } } - - } - - @Messages({ - "ExtractEdge_programName=Microsoft Edge" - }) - private BlackboardArtifact parseHistoryLine(AbstractFile origFile, List headers, String line){ - BlackboardArtifact bbart = null; - String[] rowSplit = line.split(","); - - int index = headers.indexOf("url"); - String urlUserStr = rowSplit[index]; - - String[] str = urlUserStr.split("@"); - String user = str[0].replace("Visited: ", ""); - String url = str[1]; - - index = headers.indexOf("accessedtime"); - String accessTime = rowSplit[index].trim(); - Long ftime = null; - try{ - Long epochtime = dateFormatter.parse(accessTime).getTime(); - ftime = epochtime / 1000; - }catch(ParseException ex){ - logger.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); - } - - try{ - bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); - Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), url)); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - RecentActivityExtracterModuleFactory.getModuleName(), ftime)); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, - RecentActivityExtracterModuleFactory.getModuleName(), "")); - - bbattributes.add(new 
BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, - RecentActivityExtracterModuleFactory.getModuleName(), "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), Bundle.ExtractEdge_programName())); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), (NetworkUtils.extractDomain(url)))); //NON-NLS - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), user)); - - bbart.addAttributes(bbattributes); - - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error writing Microsoft Edge web history artifact to the blackboard.", ex); //NON-NLS - } - - return bbart; } /** @@ -377,4 +321,67 @@ final class ExtractEdge extends Extract { ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); } + + @Messages({ + "ExtractEdge_programName=Microsoft Edge" + }) + private BlackboardArtifact parseHistoryLine(AbstractFile origFile, List headers, String line) throws TskCoreException { + String[] rowSplit = line.split(","); + + int index = headers.indexOf(EDGE_HEAD_URL); + String urlUserStr = rowSplit[index]; + + String[] str = urlUserStr.split("@"); + String user = (str[0].replace(EDGE_KEYWORD_VISIT, "")).trim(); + String url = str[1]; + + index = headers.indexOf(EDGE_HEAD_ACCESSTIME); + String accessTime = rowSplit[index].trim(); + Long ftime = null; + try { + Long epochtime = DATE_FORMATTER.parse(accessTime).getTime(); + ftime = epochtime / 1000; + } catch (ParseException ex) { + logger.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); //NON-NLS + } + + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); + + 
bbart.addAttributes(createHistoryAttributes(url, ftime, + "", "", + Bundle.ExtractEdge_programName(), + NetworkUtils.extractDomain(url), user)); + + return bbart; + } + + private Collection createHistoryAttributes(String url, Long accessTime, + String referrer, String title, String programName, String domain, String user) throws TskCoreException { + + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), url)); + + if (accessTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, + RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, + RecentActivityExtracterModuleFactory.getModuleName(), referrer)); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, + RecentActivityExtracterModuleFactory.getModuleName(), title)); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), programName)); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), user)); + + return bbattributes; + } } From 159524427c41521f60ad0ad5ed69fb93df0e4765 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Fri, 15 Feb 2019 15:28:57 -0500 Subject: [PATCH 12/80] Renamed parseHistory function to getHistoryArtifact and put a little error checking into createHistoryAttribute 1190-edge-History --- .../autopsy/recentactivity/ExtractEdge.java | 28 +++++++++++-------- 1 file changed, 17 insertions(+), 11 deletions(-) 
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 66099f4e10..a94d550232 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -234,7 +234,7 @@ final class ExtractEdge extends Extract { } if (line.contains(EDGE_KEYWORD_VISIT)) { - BlackboardArtifact b = parseHistoryLine(origFile, headers, line); + BlackboardArtifact b = getHistoryArtifact(origFile, headers, line); if (b != null) { bbartifacts.add(b); this.indexArtifact(b); @@ -325,7 +325,7 @@ final class ExtractEdge extends Extract { @Messages({ "ExtractEdge_programName=Microsoft Edge" }) - private BlackboardArtifact parseHistoryLine(AbstractFile origFile, List headers, String line) throws TskCoreException { + private BlackboardArtifact getHistoryArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { String[] rowSplit = line.split(","); int index = headers.indexOf(EDGE_HEAD_URL); @@ -347,20 +347,21 @@ final class ExtractEdge extends Extract { BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); - bbart.addAttributes(createHistoryAttributes(url, ftime, - "", "", + bbart.addAttributes(createHistoryAttribute(url, ftime, + null, null, Bundle.ExtractEdge_programName(), NetworkUtils.extractDomain(url), user)); return bbart; } - private Collection createHistoryAttributes(String url, Long accessTime, + private Collection createHistoryAttribute(String url, Long accessTime, String referrer, String title, String programName, String domain, String user) throws TskCoreException { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), url)); + RecentActivityExtracterModuleFactory.getModuleName(), + 
(url != null) ? url : "")); if (accessTime != null) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, @@ -368,19 +369,24 @@ final class ExtractEdge extends Extract { } bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, - RecentActivityExtracterModuleFactory.getModuleName(), referrer)); + RecentActivityExtracterModuleFactory.getModuleName(), + (referrer != null) ? referrer : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, - RecentActivityExtracterModuleFactory.getModuleName(), title)); + RecentActivityExtracterModuleFactory.getModuleName(), + (title != null) ? title : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), programName)); + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? programName : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? domain : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), user)); + RecentActivityExtracterModuleFactory.getModuleName(), + (user != null) ? 
user : "")); return bbattributes; } From c0469192f46c209345011ee2066aa2061f3300d4 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Tue, 19 Feb 2019 11:26:04 -0500 Subject: [PATCH 13/80] Updated based on second round of Richard's comments 1190-basic-edge-module --- .../autopsy/recentactivity/Extract.java | 4 ++ .../autopsy/recentactivity/ExtractEdge.java | 52 ++++++++++--------- 2 files changed, 32 insertions(+), 24 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index f934a420e3..466fbb6997 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -204,4 +204,8 @@ abstract class Extract { public boolean foundData() { return dataFound; } + + protected void setFoundData(boolean b){ + dataFound = b; + } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 6dcddf9ed0..fa02e99428 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -26,7 +26,6 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import org.openide.modules.InstalledFileLocator; -import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -36,22 +35,18 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; import org.sleuthkit.autopsy.ingest.IngestJobContext; -import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; import 
org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; /** * Extract the bookmarks, cookies, downloads and history from the Microsoft Edge - * files - * - * @author kelly + * files. */ final class ExtractEdge extends Extract { - private static final Logger logger = Logger.getLogger(ExtractIE.class.getName()); - private final IngestServices services = IngestServices.getInstance(); - private final String moduleTempResultsDir; + private static final Logger logger = Logger.getLogger(ExtractEdge.class.getName()); + private final Path moduleTempResultPath; private Content dataSource; private IngestJobContext context; @@ -63,8 +58,7 @@ final class ExtractEdge extends Extract { private static final String EDGE_SPARTAN_NAME = "Spartan.edb"; ExtractEdge() throws NoCurrentCaseException { - moduleTempResultsDir = RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE) - + File.separator + "results"; //NON-NLS + moduleTempResultPath = Paths.get(RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE), "results"); } @Messages({ @@ -78,23 +72,30 @@ final class ExtractEdge extends Extract { @Messages({ "ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer", "ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file", - "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCache file" + "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file", + "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file" }) @Override void process(Content dataSource, IngestJobContext context) { this.dataSource = dataSource; this.context = context; - dataFound = false; + this.setFoundData(false); + + List webCacheFiles = null; + List spartanFiles = null; - List webCacheFiles; - List spartanFiles; try { webCacheFiles = fetchWebCacheFiles(); - spartanFiles = fetchSpartanFiles(); // For 
later use with bookmarks } catch (TskCoreException ex) { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_errGettingWebCacheFiles()); - logger.log(Level.WARNING, "Error fetching 'WebCacheV01.dat' files for Microsoft Edge", ex); //NON-NLS - return; + logger.log(Level.SEVERE, "Error fetching 'WebCacheV01.dat' files for Microsoft Edge", ex); //NON-NLS + } + + try { + spartanFiles = fetchSpartanFiles(); // For later use with bookmarks + } catch (TskCoreException ex) { + this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_spartanFail()); + logger.log(Level.SEVERE, "Error fetching 'spartan.edb' files for Microsoft Edge", ex); //NON-NLS } // No edge files found @@ -102,10 +103,10 @@ final class ExtractEdge extends Extract { return; } - dataFound = true; + this.setFoundData(true); if (!PlatformUtil.isWindowsOS()) { - logger.log(Level.INFO, "Microsoft Edge files found, unable to parse on Non-Windows system"); //NON-NLS + logger.log(Level.WARNING, "Microsoft Edge files found, unable to parse on Non-Windows system"); //NON-NLS return; } @@ -135,15 +136,18 @@ final class ExtractEdge extends Extract { this.getBookmark(); // Not implemented yet } - void processWebCache(String eseDumperPath, List webCachFiles) throws IOException { + void processWebCache(String eseDumperPath, List webCacheFiles) throws IOException { - for (AbstractFile webCacheFile : webCachFiles) { + for (AbstractFile webCacheFile : webCacheFiles) { + + if (context.dataSourceIngestIsCancelled()) { + return; + } //Run the dumper String tempWebCacheFileName = EDGE_WEBCACHE_PREFIX + Integer.toString((int) webCacheFile.getId()) + ".dat"; //NON-NLS - File tempWebCacheFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE) - + File.separator + tempWebCacheFileName); + File tempWebCacheFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE), tempWebCacheFileName); try { ContentUtils.writeToFile(webCacheFile, tempWebCacheFile, @@ -152,7 +156,7 @@ final class ExtractEdge extends Extract 
{ throw new IOException("Error writingToFile: " + webCacheFile, ex); //NON-NLS } - File resultsDir = new File(moduleTempResultsDir + Integer.toString((int) webCacheFile.getId())); + File resultsDir = new File(moduleTempResultPath.toAbsolutePath() + Integer.toString((int) webCacheFile.getId())); resultsDir.mkdirs(); try { executeDumper(eseDumperPath, tempWebCacheFile.getAbsolutePath(), From 36d31de802e1d641cc8700ae8d9386962bc6aeba Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Tue, 19 Feb 2019 11:40:58 -0500 Subject: [PATCH 14/80] Inital commit of cookie code and skeleton of download code before merging in changes from other brancheds 1193-edge-cookies --- .../autopsy/recentactivity/ExtractEdge.java | 296 +++++++++++++++++- 1 file changed, 290 insertions(+), 6 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 66099f4e10..61a121ac4c 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -29,6 +29,7 @@ import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Hashtable; import java.util.List; import java.util.Scanner; import java.util.logging.Level; @@ -183,13 +184,13 @@ final class ExtractEdge extends Extract { return; } - this.getCookie(); // Not implemented yet + this.getCookie(webCacheFile, resultsDir); // Not implemented yet if (context.dataSourceIngestIsCancelled()) { return; } - this.getDownload(); // Not implemented yet + this.getDownload(webCacheFile, resultsDir); // Not implemented yet } finally { tempWebCacheFile.delete(); resultsDir.delete(); @@ -260,22 +261,107 @@ final class ExtractEdge extends Extract { /** * Search for bookmark files and make artifacts. 
*/ - private void getBookmark() { - + private void getBookmark(){ } /** * Queries for cookie files and adds artifacts */ - private void getCookie() { + private void getCookie(AbstractFile origFile, File resultDir) throws TskCoreException{ + File containerFiles[] = resultDir.listFiles((dir, name) -> name.toLowerCase().contains("cookie")); + if (containerFiles == null) { + this.addErrorMessage(Bundle.ExtractEdge_getHistory_containerFileNotFound()); + return; + } + + for (File file : containerFiles) { + Scanner fileScanner; + try { + fileScanner = new Scanner(new FileInputStream(file.toString())); + } catch (FileNotFoundException ex) { + logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS + continue; // If we couldn't open this file, continue to the next file + } + + Collection bbartifacts = new ArrayList<>(); + + try { + List headers = null; + while (fileScanner.hasNext()) { + String line = fileScanner.nextLine(); + if (headers == null) { + headers = Arrays.asList(line.toLowerCase().split(",")); + continue; + } + + BlackboardArtifact b = getCookieArtifact(origFile, headers, line); + if (b != null) { + bbartifacts.add(b); + this.indexArtifact(b); + } + } + } finally { + fileScanner.close(); + } + + if (!bbartifacts.isEmpty()) { + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, bbartifacts)); + } + } } /** * Queries for download files and adds artifacts */ - private void getDownload() { + private void getDownload(AbstractFile origFile, File resultDir) throws TskCoreException { + ArrayList downloadFiles = getDownloadFiles(resultDir); + + if (downloadFiles == null) { + this.addErrorMessage(Bundle.ExtractEdge_getHistory_containerFileNotFound()); + return; + } + for (File file : downloadFiles) { + Scanner fileScanner; + try { + fileScanner = new Scanner(new FileInputStream(file.toString())); + } catch 
(FileNotFoundException ex) { + logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS + continue; // If we couldn't open this file, continue to the next file + } + + Collection bbartifacts = new ArrayList<>(); + + try { + List headers = null; + while (fileScanner.hasNext()) { + String line = fileScanner.nextLine(); + if (headers == null) { + headers = Arrays.asList(line.toLowerCase().split(",")); + continue; + } + + if (line.contains("iedownload")) { +// BlackboardArtifact b = parseHistoryLine(origFile, headers, line); +// if (b != null) { +// bbartifacts.add(b); +// this.indexArtifact(b); +// } + } + } + } finally { + fileScanner.close(); + } + + if (!bbartifacts.isEmpty()) { + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, bbartifacts)); + } + } } private String getPathForESEDumper() { @@ -354,6 +440,31 @@ final class ExtractEdge extends Extract { return bbart; } + + private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { + String[] lineSplit = line.split(","); + + String accessTime = lineSplit[headers.indexOf("lastmodified")].trim(); + Long ftime = null; + try { + Long epochtime = DATE_FORMATTER.parse(accessTime).getTime(); + ftime = epochtime / 1000; + } catch (ParseException ex) { + logger.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); //NON-NLS + } + + String domain = lineSplit[headers.indexOf("rdomain")].trim(); + String name = hexToString(lineSplit[headers.indexOf("name")].trim()); + String value = hexToString(lineSplit[headers.indexOf("value")].trim()); + + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE); + bbart.addAttributes(createCookieAttributes(null, ftime, name, value, Bundle.ExtractEdge_programName(), flipDomain(domain))); + 
return bbart; + } + + private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { + return null; + } private Collection createHistoryAttributes(String url, Long accessTime, String referrer, String title, String programName, String domain, String user) throws TskCoreException { @@ -384,4 +495,177 @@ final class ExtractEdge extends Extract { return bbattributes; } + + private Collection createCookieAttributes(String url, + Long accessTime, String name, String value, String programName, String domain) { + + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), + (url != null) ? url : "")); + + if (accessTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, + RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (name != null) ? name : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, + RecentActivityExtracterModuleFactory.getModuleName(), + (value != null) ? value : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? programName : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? 
domain : "")); + + return bbattributes; + } + + private Collection createDownloadAttributes(String path, String url, Long accessTime, String domain, String programName){ + Collection bbattributes = new ArrayList<>(); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, + RecentActivityExtracterModuleFactory.getModuleName(), + (path != null) ? path : "")); + + long pathID = Util.findID(dataSource, path); + if (pathID != -1) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, + RecentActivityExtracterModuleFactory.getModuleName(), + pathID)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), + (url != null) ? url : "")); + + if (accessTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, + RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? domain : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? 
programName : "")); + + return bbattributes; + } + + private String hexToString(String hexString) { + String[] hexValues = hexString.split(" "); + StringBuilder output = new StringBuilder(); + + for (String s : hexValues) { + try { + output.append((char) Integer.parseInt(s, 16)); + } catch (NumberFormatException ex) { + return null; + } + } + + return output.toString(); + } + + // For cookies the RDomain is backwards ie com.microsoft this function flip + // it around for display, this function assumes a simple path with one or + // two periods + private String flipDomain(String domain){ + if(domain == null || domain.isEmpty()) + return null; + + String[] tokens = domain.split("\\."); + + if(tokens.length < 2 || tokens.length > 3){ + logger.log(Level.INFO, "Unexpected format for edge cookie domain: " + domain); + return domain; // don't know what to do, just send it back + } + + StringBuilder buf = new StringBuilder(); + if(tokens.length > 2){ + buf.append(tokens[2]); + buf.append("."); + } + buf.append(tokens[1]); + buf.append("."); + buf.append(tokens[0]); + + return buf.toString(); + } + + private Hashtable> getContainerIDTable(File resultDir){ + Hashtable> table = null; + File containerFiles[] = resultDir.listFiles((dir, name) -> name.contains("Containers")); + + for (File file : containerFiles) { + Scanner fileScanner; + try { + fileScanner = new Scanner(new FileInputStream(file.toString())); + } catch (FileNotFoundException ex) { + logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS + continue; // If we couldn't open this file, continue to the next file + } + + try { + List headers = null; + table = new Hashtable<>(); + int nameIdx = 0; + int idIdx = 0; + while (fileScanner.hasNext()) { + String line = fileScanner.nextLine(); + if (headers == null) { + headers = Arrays.asList(line.toLowerCase().split(",")); + nameIdx = headers.indexOf("name"); + idIdx = headers.indexOf("containerid"); + } + else{ + String[] 
row = line.split(","); + String name = row[nameIdx]; + String id = row[idIdx]; + + ArrayList idList = table.get(name); + if(idList == null){ + idList = new ArrayList<>(); + table.put(name, idList); + } + + idList.add(id); + } + } + } finally { + fileScanner.close(); + } + } + + return table; + } + + private ArrayList getDownloadFiles(File resultDir){ + Hashtable> idTable = getContainerIDTable(resultDir); + + ArrayList idList = idTable.get("iedownload"); + if(idList == null) + return null; + + ArrayList fileList = new ArrayList<>(); + for(String s : idList){ + String fileName = "Container_" + s; + File[] files = resultDir.listFiles((dir, name) -> name.contains(fileName)); + if(files != null){ + fileList.addAll(Arrays.asList(files)); + } + } + + return fileList; + } } From c4550d3a14316108676ff9cf67a70737c24a93dc Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Wed, 20 Feb 2019 15:55:26 -0500 Subject: [PATCH 15/80] Contains the code for history, bookmarks, cookies and the skeleton of downloads for JIRA 1193,1194,1195 and 1191 branch 1193-edge-cookies --- .../autopsy/recentactivity/ExtractEdge.java | 715 +++++++++++++----- 1 file changed, 529 insertions(+), 186 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index dd12961504..f97fdd5481 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -53,46 +53,71 @@ import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; /** - * Extract the bookmarks, cookies, downloads and history from the Microsoft Edge + * Extract the bookmarks, cookies, downloads and history from Microsoft Edge */ final class ExtractEdge extends Extract { - private static final Logger logger = Logger.getLogger(ExtractEdge.class.getName()); + private static final Logger LOG = 
Logger.getLogger(ExtractEdge.class.getName()); private final IngestServices services = IngestServices.getInstance(); private final Path moduleTempResultPath; private Content dataSource; private IngestJobContext context; + private Hashtable> containersTable; + + private static final String EDGE = "Edge"; + + private static final String EDGE_KEYWORD_VISIT = "Visited:"; + private static final String IGNORE_COMMA_IN_QUOTES_REGEX = ",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"; + + private static final String EDGE_TABLE_TYPE_DOWNLOAD = "iedownload"; + private static final String EDGE_TABLE_TYPE_HISTORY = "History"; + private static final String EDGE_TABLE_TYPE_COOKIE = "cookie"; + + private static final String EDGE_HEAD_URL = "url"; + private static final String EDGE_HEAD_ACCESSTIME = "accessedtime"; + private static final String EDGE_HEAD_NAME = "name"; + private static final String EDGE_HEAD_CONTAINER_ID = "containerid"; + private static final String EDGE_HEAD_RESPONSEHEAD = "responseheaders"; + private static final String EDGE_HEAD_TITLE = "title"; + private static final String EDGE_HEAD_RDOMAIN = "rdomain"; + private static final String EDGE_HEAD_VALUE = "value"; + private static final String EDGE_HEAD_LASTMOD = "lastmodified"; + + private static final String EDGE_WEBCACHE_PREFIX = "WebCacheV01"; + private static final String EDGE_CONTAINER_FILE_PREFIX = "Container_"; + private static final String EDGE_CONTAINER_FILE_EXT = ".csv"; + private static final String EDGE_WEBCACHE_EXT = ".dat"; private static final String ESE_TOOL_NAME = "ESEDatabaseView.exe"; private static final String EDGE_WEBCACHE_NAME = "WebCacheV01.dat"; - private static final String EDGE_WEBCACHE_PREFIX = "WebCacheV01"; - private static final String EDGE = "Edge"; - private static final String ESE_TOOL_FOLDER = "ESEDatabaseView"; private static final String EDGE_SPARTAN_NAME = "Spartan.edb"; - private static final String EDGE_HEAD_URL = "url"; - private static final String EDGE_HEAD_ACCESSTIME = 
"accessedtime"; - private static final String EDGE_KEYWORD_VISIT = "Visited:"; + private static final String EDGE_CONTAINTERS_FILE_NAME = "Containers.csv"; + private static final String EDGE_FAVORITE_FILE_NAME = "Favorites.csv"; + private static final String EDGE_OUTPUT_FILE_NAME = "Output.txt"; + private static final String EDGE_ERROR_FILE_NAME = "File.txt"; + + private static final String ESE_TOOL_FOLDER = "ESEDatabaseView"; + private static final String EDGE_RESULT_FOLDER_NAME = "results"; private static final SimpleDateFormat DATE_FORMATTER = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); - ExtractEdge() throws NoCurrentCaseException { - moduleTempResultPath = Paths.get(RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE), "results"); - } - - @Messages({ - "ExtractEdge_Module_Name=Microsoft Edge" - }) - @Override - protected String getName() { - return Bundle.ExtractEdge_Module_Name(); - } - @Messages({ "ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer", "ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file", "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file", - "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file" - }) + "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file", + "ExtractEdge_Module_Name=Microsoft Edge", + "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history",}) + + ExtractEdge() throws NoCurrentCaseException { + moduleTempResultPath = Paths.get(RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE), EDGE_RESULT_FOLDER_NAME); + } + + @Override + protected String getName() { + return Bundle.ExtractEdge_Module_Name(); + } + @Override void process(Content dataSource, IngestJobContext context) { this.dataSource = dataSource; @@ -103,17 +128,17 @@ final class ExtractEdge extends Extract { 
List spartanFiles = null; try { - webCacheFiles = fetchWebCacheFiles(); + webCacheFiles = fetchWebCacheDBFiles(); } catch (TskCoreException ex) { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_errGettingWebCacheFiles()); - logger.log(Level.SEVERE, "Error fetching 'WebCacheV01.dat' files for Microsoft Edge", ex); //NON-NLS + LOG.log(Level.SEVERE, "Error fetching 'WebCacheV01.dat' files for Microsoft Edge", ex); //NON-NLS } try { - spartanFiles = fetchSpartanFiles(); // For later use with bookmarks + spartanFiles = fetchSpartanDBFiles(); // For later use with bookmarks } catch (TskCoreException ex) { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_spartanFail()); - logger.log(Level.SEVERE, "Error fetching 'spartan.edb' files for Microsoft Edge", ex); //NON-NLS + LOG.log(Level.SEVERE, "Error fetching 'spartan.edb' files for Microsoft Edge", ex); //NON-NLS } // No edge files found @@ -124,49 +149,54 @@ final class ExtractEdge extends Extract { this.setFoundData(true); if (!PlatformUtil.isWindowsOS()) { - logger.log(Level.WARNING, "Microsoft Edge files found, unable to parse on Non-Windows system"); //NON-NLS + LOG.log(Level.WARNING, "Microsoft Edge files found, unable to parse on Non-Windows system"); //NON-NLS return; } final String esedumper = getPathForESEDumper(); if (esedumper == null) { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_unableFindESEViewer()); - logger.log(Level.SEVERE, "Error finding ESEDatabaseViewer program"); //NON-NLS + LOG.log(Level.SEVERE, "Error finding ESEDatabaseViewer program"); //NON-NLS return; //If we cannot find the ESEDatabaseView we cannot proceed } - if (context.dataSourceIngestIsCancelled()) { - return; + try { + this.processWebCacheDbFile(esedumper, webCacheFiles); + } catch (IOException | TskCoreException ex) { + this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); + LOG.log(Level.SEVERE, "Error returned from processWebCacheDbFile", ex); // NON-NLS } try { - 
this.processWebCache(esedumper, webCacheFiles); - } catch (IOException ex) { + this.processSpartanDbFile(esedumper, spartanFiles); + } catch (IOException | TskCoreException ex) { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); - logger.log(Level.SEVERE, "Error returned from processWebCach", ex); // NON-NLS - } catch (TskCoreException tcex) { - + LOG.log(Level.SEVERE, "Error returned from processSpartanDbFile", ex); // NON-NLS } - - if (context.dataSourceIngestIsCancelled()) { - return; - } - - // Bookmarks come from spartan.edb different file - this.getBookmark(); // Not implemented yet } - void processWebCache(String eseDumperPath, List webCacheFiles) throws IOException, TskCoreException { + /** + * Dump the tables from WebCacheV01.dat and look for the data contained with + * in those files including downloads, cookies and history. + * + * @param eseDumperPath Path to ESEDatabaseView.exe + * @param webCacheFiles List of case WebCacheV01.dat files + * @throws IOException + * @throws TskCoreException + */ + void processWebCacheDbFile(String eseDumperPath, List webCacheFiles) throws IOException, TskCoreException { for (AbstractFile webCacheFile : webCacheFiles) { if (context.dataSourceIngestIsCancelled()) { return; } + + clearContainerTable(); //Run the dumper String tempWebCacheFileName = EDGE_WEBCACHE_PREFIX - + Integer.toString((int) webCacheFile.getId()) + ".dat"; //NON-NLS + + Integer.toString((int) webCacheFile.getId()) + EDGE_WEBCACHE_EXT; //NON-NLS File tempWebCacheFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE), tempWebCacheFileName); try { @@ -180,25 +210,25 @@ final class ExtractEdge extends Extract { resultsDir.mkdirs(); try { executeDumper(eseDumperPath, tempWebCacheFile.getAbsolutePath(), - EDGE_WEBCACHE_PREFIX, resultsDir.getAbsolutePath()); + resultsDir.getAbsolutePath()); if (context.dataSourceIngestIsCancelled()) { return; } - this.getHistory(webCacheFile, resultsDir); // Not implemented yet + 
this.getHistory(webCacheFile, resultsDir); if (context.dataSourceIngestIsCancelled()) { return; } - this.getCookie(webCacheFile, resultsDir); // Not implemented yet + this.getCookie(webCacheFile, resultsDir); - if (context.dataSourceIngestIsCancelled()) { - return; - } - - this.getDownload(webCacheFile, resultsDir); // Not implemented yet +// if (context.dataSourceIngestIsCancelled()) { +// return; +// } +// Putting downloads on hold +// this.getDownload(webCacheFile, resultsDir); } finally { tempWebCacheFile.delete(); resultsDir.delete(); @@ -206,28 +236,76 @@ final class ExtractEdge extends Extract { } } + /** + * Creates a temp version of the database and runs the ESEDatabaseView tool + * to dump each of the database tables into a temporary folder. + * + * @param eseDumperPath Path to ESEDatabaseViewer + * @param spartanFiles List of the case spartan.edb files + * @throws IOException + * @throws TskCoreException + */ + void processSpartanDbFile(String eseDumperPath, List spartanFiles) throws IOException, TskCoreException { + + for (AbstractFile spartanFile : spartanFiles) { + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + //Run the dumper + String tempSpartanFileName = EDGE_WEBCACHE_PREFIX + + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT; //NON-NLS + File tempSpartanFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE), tempSpartanFileName); + + try { + ContentUtils.writeToFile(spartanFile, tempSpartanFile, + context::dataSourceIngestIsCancelled); + } catch (IOException ex) { + throw new IOException("Error writingToFile: " + spartanFile, ex); //NON-NLS + } + + File resultsDir = new File(moduleTempResultPath.toAbsolutePath() + Integer.toString((int) spartanFile.getId())); + resultsDir.mkdirs(); + try { + executeDumper(eseDumperPath, tempSpartanFile.getAbsolutePath(), + resultsDir.getAbsolutePath()); + + if (context.dataSourceIngestIsCancelled()) { + return; + } + + this.getBookmark(spartanFile, resultsDir); + + 
} finally { + tempSpartanFile.delete(); + resultsDir.delete(); + } + } + } + /** * getHistory searches the files with "container" in the file name for lines * with the text "Visited" in them. Note that not all of the container * files, if fact most of them do not, have the browser history in them. + * @param origFile Original case file + * @param resultDir Output directory of ESEDatabaseViewer + * @throws TskCoreException + * @throws FileNotFoundException */ - @Messages({ - "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history" - }) - private void getHistory(AbstractFile origFile, File resultDir) throws TskCoreException { - File containerFiles[] = resultDir.listFiles((dir, name) -> name.toLowerCase().contains("container")); + private void getHistory(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException { + ArrayList historyFiles = getHistoryFiles(resultDir); - if (containerFiles == null) { - this.addErrorMessage(Bundle.ExtractEdge_getHistory_containerFileNotFound()); + if (historyFiles == null) { return; } - for (File file : containerFiles) { + for (File file : historyFiles) { Scanner fileScanner; try { fileScanner = new Scanner(new FileInputStream(file.toString())); } catch (FileNotFoundException ex) { - logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS + LOG.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS continue; // If we couldn't open this file, continue to the next file } @@ -248,10 +326,6 @@ final class ExtractEdge extends Extract { bbartifacts.add(b); this.indexArtifact(b); } - } else { - // If Visited is not in line than this is probably - // not the container file we're looking for, move on - break; } } } finally { @@ -268,18 +342,63 @@ final class ExtractEdge extends Extract { /** * Search for bookmark files and make artifacts. 
+ * + * @param origFile Original case file + * @param resultDir Output directory of ESEDatabaseViewer + * @throws TskCoreException + * @throws FileNotFoundException */ - private void getBookmark(){ + private void getBookmark(AbstractFile origFile, File resultDir) throws TskCoreException { + Scanner fileScanner; + File favoriteFile = new File(resultDir, EDGE_FAVORITE_FILE_NAME); + + try { + fileScanner = new Scanner(new FileInputStream(favoriteFile)); + } catch (FileNotFoundException ex) { + // This is a non-fatial error, if the favorites file is not found + // there might have not been any favorites\bookmarks + return; + } + + Collection bbartifacts = new ArrayList<>(); + + try { + List headers = null; + while (fileScanner.hasNext()) { + String line = fileScanner.nextLine(); + if (headers == null) { + headers = Arrays.asList(line.toLowerCase().split(",")); + continue; + } + + BlackboardArtifact b = getBookmarkArtifact(origFile, headers, line); + if (b != null) { + bbartifacts.add(b); + this.indexArtifact(b); + } + } + } finally { + fileScanner.close(); + } + + if (!bbartifacts.isEmpty()) { + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, bbartifacts)); + } } /** * Queries for cookie files and adds artifacts + * + * @param origFile Original case file + * @param resultDir Output directory of ESEDatabaseViewer + * @throws TskCoreException */ - private void getCookie(AbstractFile origFile, File resultDir) throws TskCoreException{ - File containerFiles[] = resultDir.listFiles((dir, name) -> name.toLowerCase().contains("cookie")); + private void getCookie(AbstractFile origFile, File resultDir) throws TskCoreException { + File containerFiles[] = resultDir.listFiles((dir, name) -> name.toLowerCase().contains(EDGE_TABLE_TYPE_COOKIE)); if (containerFiles == null) { - this.addErrorMessage(Bundle.ExtractEdge_getHistory_containerFileNotFound()); return; } @@ -288,7 
+407,7 @@ final class ExtractEdge extends Extract { try { fileScanner = new Scanner(new FileInputStream(file.toString())); } catch (FileNotFoundException ex) { - logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS + LOG.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS continue; // If we couldn't open this file, continue to the next file } @@ -323,12 +442,16 @@ final class ExtractEdge extends Extract { /** * Queries for download files and adds artifacts + * + * @param origFile Original case file + * @param resultDir Output directory of ESEDatabaseViewer + * @throws TskCoreException + * @throws FileNotFoundException */ - private void getDownload(AbstractFile origFile, File resultDir) throws TskCoreException { + private void getDownload(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException { ArrayList downloadFiles = getDownloadFiles(resultDir); - + if (downloadFiles == null) { - this.addErrorMessage(Bundle.ExtractEdge_getHistory_containerFileNotFound()); return; } @@ -337,10 +460,9 @@ final class ExtractEdge extends Extract { try { fileScanner = new Scanner(new FileInputStream(file.toString())); } catch (FileNotFoundException ex) { - logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS + LOG.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS continue; // If we couldn't open this file, continue to the next file } - Collection bbartifacts = new ArrayList<>(); try { @@ -352,13 +474,14 @@ final class ExtractEdge extends Extract { continue; } - if (line.contains("iedownload")) { -// BlackboardArtifact b = parseHistoryLine(origFile, headers, line); -// if (b != null) { -// bbartifacts.add(b); -// this.indexArtifact(b); -// } - } + if (line.contains(EDGE_TABLE_TYPE_DOWNLOAD)) { + + BlackboardArtifact b = getDownloadArtifact(origFile, headers, 
line); + if (b != null) { + bbartifacts.add(b); + this.indexArtifact(b); + } + } } } finally { fileScanner.close(); @@ -372,6 +495,11 @@ final class ExtractEdge extends Extract { } } + /** + * Find the location of ESEDatabaseViewer.exe + * + * @return Absolute path to ESEDatabaseViewer.exe + */ private String getPathForESEDumper() { Path path = Paths.get(ESE_TOOL_FOLDER, ESE_TOOL_NAME); File eseToolFile = InstalledFileLocator.getDefault().locate(path.toString(), @@ -383,23 +511,47 @@ final class ExtractEdge extends Extract { return null; } - private List fetchWebCacheFiles() throws TskCoreException { + /** + * Finds all of the WebCacheV01.dat files in the case + * + * @return A list of WebCacheV01.dat files + * @throws TskCoreException + */ + private List fetchWebCacheDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME); + return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME, "WebCache"); } - private List fetchSpartanFiles() throws TskCoreException { + /** + * Finds all of the spartan.edb files in the case + * + * @return A list of spartan files + * @throws TskCoreException + */ + private List fetchSpartanDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME); + return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME, "MicrosoftEdge"); } + /** + * Executes the ESEViewDumper on the given inputFile. 
+ * + * Each table in the ese database will be dumped as a comma separated file + * named .csv + * + * @param dumperPath Path to ESEDatabaseView.exe + * @param inputFilePath Path to ese database file to be dumped + * @param outputDir Output directory for dumper + * @throws IOException + */ private void executeDumper(String dumperPath, String inputFilePath, - String inputFilePrefix, String outputDir) throws IOException { - final String outputFileFullPath = outputDir + File.separator + inputFilePrefix + ".txt"; //NON-NLS - final String errFileFullPath = outputDir + File.separator + inputFilePrefix + ".err"; //NON-NLS - logger.log(Level.INFO, "Writing ESEDatabaseViewer results to: {0}", outputDir); //NON-NLS + String outputDir) throws IOException { + + final Path outputFilePath = Paths.get(outputDir, EDGE_OUTPUT_FILE_NAME); + final Path errFilePath = Paths.get(outputDir, EDGE_ERROR_FILE_NAME); + LOG.log(Level.INFO, "Writing ESEDatabaseViewer results to: {0}", outputDir); //NON-NLS List commandLine = new ArrayList<>(); commandLine.add(dumperPath); @@ -407,18 +559,25 @@ final class ExtractEdge extends Extract { commandLine.add(inputFilePath); commandLine.add("*"); commandLine.add("/scomma"); - commandLine.add(outputDir + "\\" + inputFilePrefix + "_*.csv"); + commandLine.add(outputDir + "\\" + "*.csv"); ProcessBuilder processBuilder = new ProcessBuilder(commandLine); - processBuilder.redirectOutput(new File(outputFileFullPath)); - processBuilder.redirectError(new File(errFileFullPath)); + processBuilder.redirectOutput(outputFilePath.toFile()); + processBuilder.redirectError(errFilePath.toFile()); ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context)); } - @Messages({ - "ExtractEdge_programName=Microsoft Edge" - }) + /** + * Create a BlackboardArtifact for the given row from the Edge history + * table. 
+ * + * @param origFile Original case file + * @param headers List of table headers + * @param line CSV string representing a row of history table + * @return BlackboardArtifact representing one history table entry + * @throws TskCoreException + */ private BlackboardArtifact getHistoryArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { String[] rowSplit = line.split(","); @@ -436,44 +595,119 @@ final class ExtractEdge extends Extract { Long epochtime = DATE_FORMATTER.parse(accessTime).getTime(); ftime = epochtime / 1000; } catch (ParseException ex) { - logger.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); //NON-NLS + LOG.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); //NON-NLS } BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); bbart.addAttributes(createHistoryAttribute(url, ftime, null, null, - Bundle.ExtractEdge_programName(), + this.getName(), NetworkUtils.extractDomain(url), user)); return bbart; } - - private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { + + /** + * Create a BlackboardArtifact for the given row from the Edge cookie table. 
+ * + * @param origFile Original case file + * @param headers List of table headers + * @param line CSV string representing a row of cookie table + * @return BlackboardArtifact representing one cookie table entry + * @throws TskCoreException + */ + private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { String[] lineSplit = line.split(","); - - String accessTime = lineSplit[headers.indexOf("lastmodified")].trim(); + + String accessTime = lineSplit[headers.indexOf(EDGE_HEAD_LASTMOD)].trim(); Long ftime = null; try { Long epochtime = DATE_FORMATTER.parse(accessTime).getTime(); ftime = epochtime / 1000; } catch (ParseException ex) { - logger.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); //NON-NLS + LOG.log(Level.WARNING, "The Accessed Time format in history file seems invalid " + accessTime, ex); //NON-NLS } - - String domain = lineSplit[headers.indexOf("rdomain")].trim(); - String name = hexToString(lineSplit[headers.indexOf("name")].trim()); - String value = hexToString(lineSplit[headers.indexOf("value")].trim()); - + + String domain = lineSplit[headers.indexOf(EDGE_HEAD_RDOMAIN)].trim(); + String name = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_NAME)].trim()); + String value = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_VALUE)].trim()); + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE); - bbart.addAttributes(createCookieAttributes(null, ftime, name, value, Bundle.ExtractEdge_programName(), flipDomain(domain))); + bbart.addAttributes(createCookieAttributes(null, ftime, name, value, this.getName(), flipDomain(domain))); return bbart; } - - private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { + + /** + * Create a BlackboardArtifact for the given row from the Edge cookie table. + * + * This function is on hold for the moment. 
All of the information need + * seems to be in decodedheader, but its not currently obvious how to pull + * it apart. + * + * @param origFile Original case file + * @param headers List of table headers + * @param line CSV string representing a row of download table + * @return BlackboardArtifact representing one download table entry + * @throws TskCoreException + */ + private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { + +// String[] lineSplit = line.split(","); +// +// String url = lineSplit[headers.indexOf(EDGE_HEAD_URL)]; +// +// String rheader = lineSplit[headers.indexOf(EDGE_HEAD_RESPONSEHEAD)]; +// +// String decodedheader = this.hexToASCII(rheader); +// BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); +// bbart.addAttributes(createDownloadAttributes(decodedheader, "Test2", null, "microsoft.com", this.getName())); +// return bbart; return null; } + /** + * Parse the comma separated row of information from the "Favorites" table + * of the spartan database. 
+ * + * Note: The "Favorites" table does not have a "Creation Time" + * + * @param origFile File the table came from ie spartan.edb + * @param headers List of table column headers + * @param line The line or row of the table to parse + * @return BlackboardArtifact representation of the passed in line\table row + * @throws TskCoreException + */ + private BlackboardArtifact getBookmarkArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { + // split on all commas as long as they are not inbetween quotes + String[] lineSplit = line.split(IGNORE_COMMA_IN_QUOTES_REGEX, -1); + + String url = lineSplit[headers.indexOf(EDGE_HEAD_URL)]; + String title = lineSplit[headers.indexOf(EDGE_HEAD_TITLE)].replace("\"", ""); + + if (url.isEmpty()) { + return null; + } + + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK); + bbart.addAttributes(createBookmarkAttributes(url, title, null, + this.getName(), NetworkUtils.extractDomain(url))); + return bbart; + } + + /** + * Creates a list of attributes for a history artifact. + * + * @param url + * @param accessTime + * @param referrer + * @param title + * @param programName + * @param domain + * @param user + * @return + * @throws TskCoreException + */ private Collection createHistoryAttribute(String url, Long accessTime, String referrer, String title, String programName, String domain, String user) throws TskCoreException { @@ -492,35 +726,46 @@ final class ExtractEdge extends Extract { (referrer != null) ? referrer : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, - RecentActivityExtracterModuleFactory.getModuleName(), + RecentActivityExtracterModuleFactory.getModuleName(), (title != null) ? 
title : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), + RecentActivityExtracterModuleFactory.getModuleName(), (programName != null) ? programName : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), + RecentActivityExtracterModuleFactory.getModuleName(), (domain != null) ? domain : "")); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), + RecentActivityExtracterModuleFactory.getModuleName(), (user != null) ? user : "")); return bbattributes; } - + + /** + * Creates a list of attributes for a cookie. + * + * @param url cookie url + * @param creationTime cookie creation time + * @param name cookie name + * @param value cookie value + * @param programName Name of the module creating the attribute + * @param domain Domain of the URL + * @return List of BlackboarAttributes for the passed in attributes + */ private Collection createCookieAttributes(String url, - Long accessTime, String name, String value, String programName, String domain) { - + Long creationTime, String name, String value, String programName, String domain) { + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), (url != null) ? 
url : "")); - if (accessTime != null) { + if (creationTime != null) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, - RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); + RecentActivityExtracterModuleFactory.getModuleName(), creationTime)); } bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, @@ -541,48 +786,104 @@ final class ExtractEdge extends Extract { return bbattributes; } - - private Collection createDownloadAttributes(String path, String url, Long accessTime, String domain, String programName){ + + /** + * Creates a list of the attributes of a downloaded file + * + * @param path + * @param url URL of the downloaded file + * @param accessTime Time the download occurred + * @param domain Domain of the URL + * @param programName Name of the module creating the attribute + * @return A collection of attributed of a downloaded file + */ + private Collection createDownloadAttributes(String path, String url, Long accessTime, String domain, String programName) { Collection bbattributes = new ArrayList<>(); - + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, RecentActivityExtracterModuleFactory.getModuleName(), (path != null) ? path : "")); - + long pathID = Util.findID(dataSource, path); if (pathID != -1) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, RecentActivityExtracterModuleFactory.getModuleName(), pathID)); } - + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), (url != null) ? 
url : "")); - + if (accessTime != null) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); } - + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), (domain != null) ? domain : "")); - + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), (programName != null) ? programName : "")); - + return bbattributes; } - private String hexToString(String hexString) { + /** + * Creates a list of bookmark attributes from the passed in parameters. + * + * @param url Bookmark url + * @param title Title of the bookmarked page + * @param creationTime Date & time at which the bookmark was created + * @param programName Name of the module creating the attribute + * @param domain The domain of the bookmark's url + * @return A collection of bookmark attributes + */ + private Collection createBookmarkAttributes(String url, String title, Long creationTime, String programName, String domain) { + Collection bbattributes = new ArrayList<>(); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), + (url != null) ? url : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, + RecentActivityExtracterModuleFactory.getModuleName(), + (title != null) ? title : "")); + + if (creationTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, + RecentActivityExtracterModuleFactory.getModuleName(), creationTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? 
programName : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? domain : "")); + + return bbattributes; + } + + /** + * Converts a space separated string of hex values to ascii characters. + * + * @param hexString + * @return "decoded" string + */ + private String hexToChar(String hexString) { String[] hexValues = hexString.split(" "); StringBuilder output = new StringBuilder(); for (String s : hexValues) { try { - output.append((char) Integer.parseInt(s, 16)); + int i = Integer.parseInt(s, 16); + if (i > 31) { // Ignore non-print characters + output.append((char) i); + } } catch (NumberFormatException ex) { return null; } @@ -590,23 +891,31 @@ final class ExtractEdge extends Extract { return output.toString(); } - - // For cookies the RDomain is backwards ie com.microsoft this function flip - // it around for display, this function assumes a simple path with one or - // two periods - private String flipDomain(String domain){ - if(domain == null || domain.isEmpty()) + + /** + * The RDomain in the WebCacheV01.data cookies tables are backwards, this + * function corrects them. + * + * Values in the RDomain appear as either com.microsoft.www or com.microsoft + * but for some reason there could also be "junk". the length checks are + * there to weed out the "junk". 
+ * + * @param domain + * @return Correct domain string + */ + private String flipDomain(String domain) { + if (domain == null || domain.isEmpty()) { return null; + } String[] tokens = domain.split("\\."); - if(tokens.length < 2 || tokens.length > 3){ - logger.log(Level.INFO, "Unexpected format for edge cookie domain: " + domain); - return domain; // don't know what to do, just send it back + if (tokens.length < 2 || tokens.length > 3) { + return domain; // don't know what to do, just send it back as is } StringBuilder buf = new StringBuilder(); - if(tokens.length > 2){ + if (tokens.length > 2) { buf.append(tokens[2]); buf.append("."); } @@ -616,70 +925,104 @@ final class ExtractEdge extends Extract { return buf.toString(); } - - private Hashtable> getContainerIDTable(File resultDir){ - Hashtable> table = null; - File containerFiles[] = resultDir.listFiles((dir, name) -> name.contains("Containers")); - for (File file : containerFiles) { - Scanner fileScanner; - try { - fileScanner = new Scanner(new FileInputStream(file.toString())); - } catch (FileNotFoundException ex) { - logger.log(Level.WARNING, "Unable to find the ESEDatabaseView file at " + file.getPath(), ex); //NON-NLS - continue; // If we couldn't open this file, continue to the next file - } + /** + * Returns a list the container files that have download information in + * them. + * + * @param resultDir Path to ESEDatabaseViewer output + * @return List of download table files + */ + private ArrayList getDownloadFiles(File resultDir) throws FileNotFoundException { + return getContainerFiles(resultDir, EDGE_TABLE_TYPE_DOWNLOAD); + } - try { + /** + * Returns a list the container files that have history information in them. 
+ * + * @param resultDir Path to ESEDatabaseViewer output + * @return List of history table files + * @throws FileNotFoundException + */ + private ArrayList getHistoryFiles(File resultDir) throws FileNotFoundException { + return getContainerFiles(resultDir, EDGE_TABLE_TYPE_HISTORY); + } + + /** + * Returns a list of the containers files that are of the given type string + * + * @param resultDir Path to ESEDatabaseViewer output + * @param type Type of table files + * @return List of table files + * @throws FileNotFoundException + */ + private ArrayList getContainerFiles(File resultDir, String type) throws FileNotFoundException { + Hashtable> idTable = getContainerIDTable(resultDir); + + ArrayList idList = idTable.get(type); + if (idList == null) { + return null; + } + + ArrayList fileList = new ArrayList<>(); + for (String s : idList) { + String fileName = EDGE_CONTAINER_FILE_PREFIX + s + EDGE_CONTAINER_FILE_EXT; + fileList.add(new File(resultDir, fileName)); + } + + return fileList; + } + + /** + * Opens and reads the Containers table to create a table of information + * about which of the Continer_xx files contain which type of information. + * + * Each row of the "Containers" table describes one of the Container_xx + * files. 
+ * + * @param resultDir Path to ESEDatabaseViewer output + * @return Hashtable with Key representing the table type, the value is a list of table ids for that type + */ + private Hashtable> getContainerIDTable(File resultDir) throws FileNotFoundException { + + if (containersTable == null) { + File containerFile = new File(resultDir, EDGE_CONTAINTERS_FILE_NAME); + + try (Scanner fileScanner = new Scanner(new FileInputStream(containerFile))) { List headers = null; - table = new Hashtable<>(); + containersTable = new Hashtable<>(); int nameIdx = 0; int idIdx = 0; while (fileScanner.hasNext()) { String line = fileScanner.nextLine(); if (headers == null) { headers = Arrays.asList(line.toLowerCase().split(",")); - nameIdx = headers.indexOf("name"); - idIdx = headers.indexOf("containerid"); - } - else{ + nameIdx = headers.indexOf(EDGE_HEAD_NAME); + idIdx = headers.indexOf(EDGE_HEAD_CONTAINER_ID); + } else { String[] row = line.split(","); String name = row[nameIdx]; String id = row[idIdx]; - - ArrayList idList = table.get(name); - if(idList == null){ + + ArrayList idList = containersTable.get(name); + if (idList == null) { idList = new ArrayList<>(); - table.put(name, idList); - } - + containersTable.put(name, idList); + } + idList.add(id); } } - } finally { - fileScanner.close(); } } - - return table; + + return containersTable; } - private ArrayList getDownloadFiles(File resultDir){ - Hashtable> idTable = getContainerIDTable(resultDir); - - ArrayList idList = idTable.get("iedownload"); - if(idList == null) - return null; - - ArrayList fileList = new ArrayList<>(); - for(String s : idList){ - String fileName = "Container_" + s; - File[] files = resultDir.listFiles((dir, name) -> name.contains(fileName)); - if(files != null){ - fileList.addAll(Arrays.asList(files)); - } - } - - return fileList; + /** + * Clears the containerTable + */ + private void clearContainerTable(){ + containersTable = null; } } From e3f68e42fc42662682a8675e97c320f50aa76ed2 Mon Sep 17 00:00:00 2001 
From: Kelly Kelly Date: Thu, 21 Feb 2019 10:35:30 -0500 Subject: [PATCH 16/80] Updates some comments 1193-edge-cookies --- .../autopsy/recentactivity/ExtractEdge.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index f97fdd5481..8acfce9f87 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -698,14 +698,14 @@ final class ExtractEdge extends Extract { /** * Creates a list of attributes for a history artifact. * - * @param url - * @param accessTime - * @param referrer - * @param title - * @param programName - * @param domain - * @param user - * @return + * @param url + * @param accessTime Time url was accessed + * @param referrer referred url + * @param title title of the page + * @param programName module name + * @param domain domain of the url + * @param user user that accessed url + * @return List of BlackboardAttributes for giving attributes * @throws TskCoreException */ private Collection createHistoryAttribute(String url, Long accessTime, From 65928ba367c866282fc4a80ae4a4b9f226235491 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Thu, 21 Feb 2019 15:10:34 -0500 Subject: [PATCH 17/80] Support for getting history from the Safari Browser. 
Moved create functions from ExtractEdge to Extract for use in Extract Safari 1197-safari-history --- .../autopsy/recentactivity/Extract.java | 182 ++++++++++++++++++ .../autopsy/recentactivity/ExtractEdge.java | 176 +---------------- .../autopsy/recentactivity/ExtractSafari.java | 180 +++++++++++++++++ .../recentactivity/RAImageIngestModule.java | 3 + 4 files changed, 366 insertions(+), 175 deletions(-) create mode 100755 RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index 466fbb6997..f677ae8d22 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -208,4 +208,186 @@ abstract class Extract { protected void setFoundData(boolean b){ dataFound = b; } + + /** + * Returns the current case instance + * @return Current case instance + */ + protected Case getCurrentCase(){ + return this.currentCase; + } + + /** + * Creates a list of attributes for a history artifact. + * + * @param url + * @param accessTime Time url was accessed + * @param referrer referred url + * @param title title of the page + * @param programName module name + * @param domain domain of the url + * @param user user that accessed url + * @return List of BlackboardAttributes for giving attributes + * @throws TskCoreException + */ + protected Collection createHistoryAttribute(String url, Long accessTime, + String referrer, String title, String programName, String domain, String user) throws TskCoreException { + + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), + (url != null) ? 
url : "")); + + if (accessTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, + RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, + RecentActivityExtracterModuleFactory.getModuleName(), + (referrer != null) ? referrer : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, + RecentActivityExtracterModuleFactory.getModuleName(), + (title != null) ? title : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? programName : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? domain : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (user != null) ? user : "")); + + return bbattributes; + } + + /** + * Creates a list of attributes for a cookie. + * + * @param url cookie url + * @param creationTime cookie creation time + * @param name cookie name + * @param value cookie value + * @param programName Name of the module creating the attribute + * @param domain Domain of the URL + * @return List of BlackboarAttributes for the passed in attributes + */ + protected Collection createCookieAttributes(String url, + Long creationTime, String name, String value, String programName, String domain) { + + Collection bbattributes = new ArrayList<>(); + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), + (url != null) ? 
url : "")); + + if (creationTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, + RecentActivityExtracterModuleFactory.getModuleName(), creationTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (name != null) ? name : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, + RecentActivityExtracterModuleFactory.getModuleName(), + (value != null) ? value : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? programName : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? domain : "")); + + return bbattributes; + } + + /** + * Creates a list of bookmark attributes from the passed in parameters. + * + * @param url Bookmark url + * @param title Title of the bookmarked page + * @param creationTime Date & time at which the bookmark was created + * @param programName Name of the module creating the attribute + * @param domain The domain of the bookmark's url + * @return A collection of bookmark attributes + */ + protected Collection createBookmarkAttributes(String url, String title, Long creationTime, String programName, String domain) { + Collection bbattributes = new ArrayList<>(); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), + (url != null) ? url : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, + RecentActivityExtracterModuleFactory.getModuleName(), + (title != null) ? 
title : "")); + + if (creationTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, + RecentActivityExtracterModuleFactory.getModuleName(), creationTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? programName : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? domain : "")); + + return bbattributes; + } + + /** + * Creates a list of the attributes of a downloaded file + * + * @param path + * @param url URL of the downloaded file + * @param accessTime Time the download occurred + * @param domain Domain of the URL + * @param programName Name of the module creating the attribute + * @return A collection of attributed of a downloaded file + */ + protected Collection createDownloadAttributes(String path, Long pathID, String url, Long accessTime, String domain, String programName) { + Collection bbattributes = new ArrayList<>(); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, + RecentActivityExtracterModuleFactory.getModuleName(), + (path != null) ? path : "")); + + if (pathID != null && pathID != -1) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, + RecentActivityExtracterModuleFactory.getModuleName(), + pathID)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, + RecentActivityExtracterModuleFactory.getModuleName(), + (url != null) ? 
url : "")); + + if (accessTime != null) { + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, + RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); + } + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, + RecentActivityExtracterModuleFactory.getModuleName(), + (domain != null) ? domain : "")); + + bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, + RecentActivityExtracterModuleFactory.getModuleName(), + (programName != null) ? programName : "")); + + return bbattributes; + } + + } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 8acfce9f87..a697eb0696 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -48,7 +48,6 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; @@ -694,180 +693,7 @@ final class ExtractEdge extends Extract { this.getName(), NetworkUtils.extractDomain(url))); return bbart; } - - /** - * Creates a list of attributes for a history artifact. 
- * - * @param url - * @param accessTime Time url was accessed - * @param referrer referred url - * @param title title of the page - * @param programName module name - * @param domain domain of the url - * @param user user that accessed url - * @return List of BlackboardAttributes for giving attributes - * @throws TskCoreException - */ - private Collection createHistoryAttribute(String url, Long accessTime, - String referrer, String title, String programName, String domain, String user) throws TskCoreException { - - Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); - - if (accessTime != null) { - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); - } - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, - RecentActivityExtracterModuleFactory.getModuleName(), - (referrer != null) ? referrer : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, - RecentActivityExtracterModuleFactory.getModuleName(), - (title != null) ? title : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? domain : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), - (user != null) ? user : "")); - - return bbattributes; - } - - /** - * Creates a list of attributes for a cookie. 
- * - * @param url cookie url - * @param creationTime cookie creation time - * @param name cookie name - * @param value cookie value - * @param programName Name of the module creating the attribute - * @param domain Domain of the URL - * @return List of BlackboarAttributes for the passed in attributes - */ - private Collection createCookieAttributes(String url, - Long creationTime, String name, String value, String programName, String domain) { - - Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); - - if (creationTime != null) { - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, - RecentActivityExtracterModuleFactory.getModuleName(), creationTime)); - } - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), - (name != null) ? name : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, - RecentActivityExtracterModuleFactory.getModuleName(), - (value != null) ? value : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? 
domain : "")); - - return bbattributes; - } - - /** - * Creates a list of the attributes of a downloaded file - * - * @param path - * @param url URL of the downloaded file - * @param accessTime Time the download occurred - * @param domain Domain of the URL - * @param programName Name of the module creating the attribute - * @return A collection of attributed of a downloaded file - */ - private Collection createDownloadAttributes(String path, String url, Long accessTime, String domain, String programName) { - Collection bbattributes = new ArrayList<>(); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, - RecentActivityExtracterModuleFactory.getModuleName(), - (path != null) ? path : "")); - - long pathID = Util.findID(dataSource, path); - if (pathID != -1) { - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, - RecentActivityExtracterModuleFactory.getModuleName(), - pathID)); - } - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); - - if (accessTime != null) { - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); - } - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? domain : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); - - return bbattributes; - } - - /** - * Creates a list of bookmark attributes from the passed in parameters. 
- * - * @param url Bookmark url - * @param title Title of the bookmarked page - * @param creationTime Date & time at which the bookmark was created - * @param programName Name of the module creating the attribute - * @param domain The domain of the bookmark's url - * @return A collection of bookmark attributes - */ - private Collection createBookmarkAttributes(String url, String title, Long creationTime, String programName, String domain) { - Collection bbattributes = new ArrayList<>(); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, - RecentActivityExtracterModuleFactory.getModuleName(), - (title != null) ? title : "")); - - if (creationTime != null) { - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - RecentActivityExtracterModuleFactory.getModuleName(), creationTime)); - } - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? domain : "")); - - return bbattributes; - } - + /** * Converts a space separated string of hex values to ascii characters. * diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java new file mode 100755 index 0000000000..53d766e54f --- /dev/null +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -0,0 +1,180 @@ +/* + * + * Autopsy Forensic Browser + * + * Copyright 2019 Basis Technology Corp. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.recentactivity; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.logging.Level; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.casemodule.services.FileManager; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.NetworkUtils; +import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.autopsy.ingest.IngestJobContext; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Extract the bookmarks, cookies, downloads and history from Safari + * + */ +final class ExtractSafari extends Extract{ + + private final IngestServices services = IngestServices.getInstance(); + + // visit_time uses an epoch of Jan 1, 2001 thus the addition of 978307200 + private static final String SAFARI_HISTORY_QUERY = "SELECT url, title, visit_time + 978307200 as time FROM 'history_items' JOIN history_visits ON history_item = history_items.id;"; + + private static final String 
SAFARI_HISTORY_FILE_NAME = "History.db"; + private static final String SAFARI_DATABASE_EXT = ".db"; + + private static final String SAFARI_HEAD_URL = "url"; + private static final String SAFARI_HEAD_TITLE = "title"; + private static final String SAFARI_HEAD_TIME = "time"; + + private final Logger logger = Logger.getLogger(this.getClass().getName()); + + @Messages({ + "ExtractSafari_Module_Name=Safari", + "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files." + }) + + ExtractSafari(){ + + } + + @Override + protected String getName() { + return Bundle.ExtractSafari_Module_Name(); + } + + @Override + void process(Content dataSource, IngestJobContext context) { + setFoundData(false); + + try{ + processHistoryDB(dataSource, context); + }catch(IOException | TskCoreException ex){ + this.addErrorMessage(Bundle.ExtractSafari_Error_Getting_History()); + logger.log(Level.SEVERE, "Exception thrown while processing history file: " + ex); + } + } + + /** + * Finds the all of the history.db files in the case looping through them to + * find all of the history artifacts + * + * @throws TskCoreException + * @throws IOException + */ + private void processHistoryDB(Content dataSource, IngestJobContext context)throws TskCoreException, IOException{ + FileManager fileManager = getCurrentCase().getServices().getFileManager(); + + List historyFiles = fileManager.findFiles(dataSource, SAFARI_HISTORY_FILE_NAME); + + if(historyFiles == null || historyFiles.isEmpty()){ + return; + } + + this.setFoundData(true); + + for (AbstractFile historyFile : historyFiles) { + if (context.dataSourceIngestIsCancelled()) { + break; + } + + getHistory(context, historyFile); + } + } + + /** + * Creates a temporary copy of historyFile and creates a list of + * BlackboardArtifacts for the history information in the file. 
+ * + * @param historyFile AbstractFile version of the history file from the case + * @throws TskCoreException + * @throws IOException + */ + private void getHistory(IngestJobContext context, AbstractFile historyFile) throws TskCoreException, IOException{ + if(historyFile.getSize() == 0) + return; + + Path tempHistoryPath = Paths.get(RAImageIngestModule.getRATempPath( + getCurrentCase(), getName()), historyFile.getName() + historyFile.getId() + SAFARI_DATABASE_EXT); + File tempHistoryFile = tempHistoryPath.toFile(); + + try{ + ContentUtils.writeToFile(historyFile, tempHistoryFile, context::dataSourceIngestIsCancelled); + } catch(IOException ex){ + throw new IOException("Error writingToFile: " + historyFile, ex); //NON-NLS + } + + try{ + Collection bbartifacts = getHistoryArtifacts(historyFile, tempHistoryPath); + if(!bbartifacts.isEmpty()){ + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, bbartifacts)); + } + } finally{ + tempHistoryFile.delete(); + } + } + + /** + * Queries the history db for the history information creating a list of + * BlackBoardArtifact for each row returned from the db. 
+ * + * @param origFile AbstractFile of the history file from the case + * @param tempFilePath Path to temporary copy of the history db + * @return Blackboard Artifacts for the history db + * @throws TskCoreException + */ + private Collection getHistoryArtifacts(AbstractFile origFile, Path tempFilePath) throws TskCoreException{ + List> historyList = this.dbConnect(tempFilePath.toString(), SAFARI_HISTORY_QUERY); + + if(historyList == null || historyList.isEmpty()) + return null; + + Collection bbartifacts = new ArrayList<>(); + for (HashMap row : historyList) { + String url = row.get(SAFARI_HEAD_URL).toString(); + String title = row.get(SAFARI_HEAD_TITLE).toString(); + Long time = (Double.valueOf(row.get(SAFARI_HEAD_TIME).toString())).longValue(); + + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); + bbart.addAttributes(createHistoryAttribute(url, time, null, title, + this.getName(), NetworkUtils.extractDomain(url), null)); + bbartifacts.add(bbart); + } + + return bbartifacts; + } +} + diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 7e959660f4..434cf968de 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -74,11 +74,13 @@ public final class RAImageIngestModule implements DataSourceIngestModule { Extract SEUQA = new SearchEngineURLQueryAnalyzer(); Extract osExtract = new ExtractOs(); Extract dataSourceAnalyzer = new DataSourceUsageAnalyzer(); + Extract safari = new ExtractSafari(); extractors.add(chrome); extractors.add(firefox); extractors.add(iexplore); extractors.add(edge); + extractors.add(safari); extractors.add(recentDocuments); extractors.add(SEUQA); // this needs to run after the web browser modules extractors.add(registry); // this should run 
after quicker modules like the browser modules and needs to run before the DataSourceUsageAnalyzer @@ -89,6 +91,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule { browserExtractors.add(firefox); browserExtractors.add(iexplore); browserExtractors.add(edge); + browserExtractors.add(safari); for (Extract extractor : extractors) { extractor.init(); From 2926bfd634ccf437247d2179b4512f6ab7c56811 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Fri, 22 Feb 2019 08:53:28 -0500 Subject: [PATCH 18/80] clean added comments 1197-safari-history --- .../src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 53d766e54f..6c78c1ca41 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -82,7 +82,7 @@ final class ExtractSafari extends Extract{ processHistoryDB(dataSource, context); }catch(IOException | TskCoreException ex){ this.addErrorMessage(Bundle.ExtractSafari_Error_Getting_History()); - logger.log(Level.SEVERE, "Exception thrown while processing history file: " + ex); + logger.log(Level.SEVERE, "Exception thrown while processing history file: " + ex); //NON-NLS } } From ba04c8ad93426f2f6a2cc676aa5d1333714bcb69 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Fri, 22 Feb 2019 10:18:13 -0500 Subject: [PATCH 19/80] Forgot to do ALT-SHIFT-F before pushing. 
1197-safari-history --- .../autopsy/recentactivity/ExtractSafari.java | 101 +++++++++--------- 1 file changed, 51 insertions(+), 50 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 6c78c1ca41..b341b79c6f 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -42,31 +42,31 @@ import org.sleuthkit.datamodel.TskCoreException; /** * Extract the bookmarks, cookies, downloads and history from Safari - * + * */ -final class ExtractSafari extends Extract{ - +final class ExtractSafari extends Extract { + private final IngestServices services = IngestServices.getInstance(); - + // visit_time uses an epoch of Jan 1, 2001 thus the addition of 978307200 private static final String SAFARI_HISTORY_QUERY = "SELECT url, title, visit_time + 978307200 as time FROM 'history_items' JOIN history_visits ON history_item = history_items.id;"; - + private static final String SAFARI_HISTORY_FILE_NAME = "History.db"; private static final String SAFARI_DATABASE_EXT = ".db"; - + private static final String SAFARI_HEAD_URL = "url"; private static final String SAFARI_HEAD_TITLE = "title"; private static final String SAFARI_HEAD_TIME = "time"; - + private final Logger logger = Logger.getLogger(this.getClass().getName()); @Messages({ "ExtractSafari_Module_Name=Safari", "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files." 
}) - - ExtractSafari(){ - + + ExtractSafari() { + } @Override @@ -77,91 +77,93 @@ final class ExtractSafari extends Extract{ @Override void process(Content dataSource, IngestJobContext context) { setFoundData(false); - - try{ + + try { processHistoryDB(dataSource, context); - }catch(IOException | TskCoreException ex){ + } catch (IOException | TskCoreException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Getting_History()); logger.log(Level.SEVERE, "Exception thrown while processing history file: " + ex); //NON-NLS } } - + /** * Finds the all of the history.db files in the case looping through them to * find all of the history artifacts - * + * * @throws TskCoreException - * @throws IOException + * @throws IOException */ - private void processHistoryDB(Content dataSource, IngestJobContext context)throws TskCoreException, IOException{ - FileManager fileManager = getCurrentCase().getServices().getFileManager(); - + private void processHistoryDB(Content dataSource, IngestJobContext context) throws TskCoreException, IOException { + FileManager fileManager = getCurrentCase().getServices().getFileManager(); + List historyFiles = fileManager.findFiles(dataSource, SAFARI_HISTORY_FILE_NAME); - - if(historyFiles == null || historyFiles.isEmpty()){ + + if (historyFiles == null || historyFiles.isEmpty()) { return; } - + this.setFoundData(true); - + for (AbstractFile historyFile : historyFiles) { if (context.dataSourceIngestIsCancelled()) { break; } - + getHistory(context, historyFile); } } - + /** - * Creates a temporary copy of historyFile and creates a list of + * Creates a temporary copy of historyFile and creates a list of * BlackboardArtifacts for the history information in the file. 
- * + * * @param historyFile AbstractFile version of the history file from the case * @throws TskCoreException - * @throws IOException + * @throws IOException */ - private void getHistory(IngestJobContext context, AbstractFile historyFile) throws TskCoreException, IOException{ - if(historyFile.getSize() == 0) + private void getHistory(IngestJobContext context, AbstractFile historyFile) throws TskCoreException, IOException { + if (historyFile.getSize() == 0) { return; - + } + Path tempHistoryPath = Paths.get(RAImageIngestModule.getRATempPath( getCurrentCase(), getName()), historyFile.getName() + historyFile.getId() + SAFARI_DATABASE_EXT); File tempHistoryFile = tempHistoryPath.toFile(); - - try{ + + try { ContentUtils.writeToFile(historyFile, tempHistoryFile, context::dataSourceIngestIsCancelled); - } catch(IOException ex){ + } catch (IOException ex) { throw new IOException("Error writingToFile: " + historyFile, ex); //NON-NLS } - - try{ + + try { Collection bbartifacts = getHistoryArtifacts(historyFile, tempHistoryPath); - if(!bbartifacts.isEmpty()){ - services.fireModuleDataEvent(new ModuleDataEvent( + if (!bbartifacts.isEmpty()) { + services.fireModuleDataEvent(new ModuleDataEvent( RecentActivityExtracterModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY, bbartifacts)); } - } finally{ + } finally { tempHistoryFile.delete(); } } - + /** - * Queries the history db for the history information creating a list of + * Queries the history db for the history information creating a list of * BlackBoardArtifact for each row returned from the db. 
- * + * * @param origFile AbstractFile of the history file from the case * @param tempFilePath Path to temporary copy of the history db * @return Blackboard Artifacts for the history db - * @throws TskCoreException + * @throws TskCoreException */ - private Collection getHistoryArtifacts(AbstractFile origFile, Path tempFilePath) throws TskCoreException{ + private Collection getHistoryArtifacts(AbstractFile origFile, Path tempFilePath) throws TskCoreException { List> historyList = this.dbConnect(tempFilePath.toString(), SAFARI_HISTORY_QUERY); - - if(historyList == null || historyList.isEmpty()) + + if (historyList == null || historyList.isEmpty()) { return null; - + } + Collection bbartifacts = new ArrayList<>(); for (HashMap row : historyList) { String url = row.get(SAFARI_HEAD_URL).toString(); @@ -169,12 +171,11 @@ final class ExtractSafari extends Extract{ Long time = (Double.valueOf(row.get(SAFARI_HEAD_TIME).toString())).longValue(); BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); - bbart.addAttributes(createHistoryAttribute(url, time, null, title, + bbart.addAttributes(createHistoryAttribute(url, time, null, title, this.getName(), NetworkUtils.extractDomain(url), null)); bbartifacts.add(bbart); } - + return bbartifacts; } } - From a71fa44bf7c9ee76f036cd7f5df5a1c9f7e967d1 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Fri, 22 Feb 2019 11:49:07 -0500 Subject: [PATCH 20/80] moving plist jar from core to corelib, had build errors --- Core/ivy.xml | 1 - Core/nbproject/project.properties | 1 - Core/nbproject/project.xml | 4 ---- CoreLibs/ivy.xml | 2 ++ CoreLibs/nbproject/project.properties | 1 + CoreLibs/nbproject/project.xml | 20 ++++++++++++-------- 6 files changed, 15 insertions(+), 14 deletions(-) diff --git a/Core/ivy.xml b/Core/ivy.xml index 0088cc75ba..a45d09bf8b 100644 --- a/Core/ivy.xml +++ b/Core/ivy.xml @@ -30,7 +30,6 @@ - diff --git a/Core/nbproject/project.properties b/Core/nbproject/project.properties 
index 58622cf7a5..27838ea554 100644 --- a/Core/nbproject/project.properties +++ b/Core/nbproject/project.properties @@ -12,7 +12,6 @@ file.reference.commons-compress-1.14.jar=release/modules/ext/commons-compress-1. file.reference.commons-dbcp2-2.1.1.jar=release/modules/ext/commons-dbcp2-2.1.1.jar file.reference.commons-io-2.5.jar=release/modules/ext/commons-io-2.5.jar file.reference.commons-pool2-2.4.2.jar=release/modules/ext/commons-pool2-2.4.2.jar -file.reference.dd-plist-1.20.jar=release/modules/ext/dd-plist-1.20.jar file.reference.geoapi-3.0.0.jar=release/modules/ext/geoapi-3.0.0.jar file.reference.grib-4.5.5.jar=release/modules/ext/grib-4.5.5.jar file.reference.httpservices-4.5.5.jar=release/modules/ext/httpservices-4.5.5.jar diff --git a/Core/nbproject/project.xml b/Core/nbproject/project.xml index 802dcfbd43..bbb0693d50 100644 --- a/Core/nbproject/project.xml +++ b/Core/nbproject/project.xml @@ -627,10 +627,6 @@ ext/Rejistry-1.0-SNAPSHOT.jar release/modules/ext/Rejistry-1.0-SNAPSHOT.jar - - ext/dd-plist-1.20.jar - release/modules/ext/dd-plist-1.20.jar - ext/rome-1.5.1.jar release/modules/ext/rome-1.5.1.jar diff --git a/CoreLibs/ivy.xml b/CoreLibs/ivy.xml index 196fcc36ee..4970130ff5 100644 --- a/CoreLibs/ivy.xml +++ b/CoreLibs/ivy.xml @@ -67,5 +67,7 @@ + + diff --git a/CoreLibs/nbproject/project.properties b/CoreLibs/nbproject/project.properties index fee5235915..037c0b3e1d 100644 --- a/CoreLibs/nbproject/project.properties +++ b/CoreLibs/nbproject/project.properties @@ -73,6 +73,7 @@ file.reference.slf4j-simple-1.6.1.jar=release/modules/ext/slf4j-simple-1.6.1.jar file.reference.stax-api-1.0.1.jar=release/modules/ext/stax-api-1.0.1.jar file.reference.xml-apis-1.0.b2.jar=release/modules/ext/xml-apis-1.0.b2.jar file.reference.xmlbeans-2.6.0.jar=release/modules/ext/xmlbeans-2.6.0.jar +file.reference.dd-plist-1.20.jar=release/modules/ext/dd-plist-1.20.jar javac.source=1.8 javac.compilerargs=-Xlint -Xlint:-serial 
javadoc.reference.commons-csv-1.4.jar=release/modules/ext/commons-csv-1.4-javadoc.jar diff --git a/CoreLibs/nbproject/project.xml b/CoreLibs/nbproject/project.xml index bb34c3281e..6ea61cd742 100644 --- a/CoreLibs/nbproject/project.xml +++ b/CoreLibs/nbproject/project.xml @@ -742,18 +742,10 @@ ext/jna-3.4.0.jar release/modules/ext/jna-3.4.0.jar - - ext/gson-2.8.1.jar - release/modules/ext/gson-2.8.1.jar - ext/jfxtras-common-8.0-r4.jar release/modules/ext/jfxtras-common-8.0-r4.jar - - ext/opencv-248.jar - release/modules/ext/opencv-248.jar - ext/jsr305-1.3.9.jar release/modules/ext/jsr305-1.3.9.jar @@ -874,6 +866,14 @@ ext/commons-codec-1.10.jar release/modules/ext/commons-codec-1.10.jar + + ext/gson-2.8.1.jar + release/modules/ext/gson-2.8.1.jar + + + ext/opencv-248.jar + release/modules/ext/opencv-248.jar + ext/slf4j-simple-1.6.1.jar release/modules/ext/slf4j-simple-1.6.1.jar @@ -970,6 +970,10 @@ ext/gstreamer-java-1.5.jar release/modules/ext/gstreamer-java-1.5.jar + + ext/dd-plist-1.20.jar + C:\Users\kelly\Workspace\autopsy\CoreLibs\release\modules\ext\dd-plist-1.20.jar + ext/dom4j-1.6.1.jar release/modules/ext/dom4j-1.6.1.jar From 5e2190a8cf006315edc49923fae1902bd1453914 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Fri, 22 Feb 2019 12:51:48 -0500 Subject: [PATCH 21/80] Set the pList jar to be exported by corelib --- CoreLibs/nbproject/project.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/CoreLibs/nbproject/project.xml b/CoreLibs/nbproject/project.xml index 6ea61cd742..33c3e28eeb 100644 --- a/CoreLibs/nbproject/project.xml +++ b/CoreLibs/nbproject/project.xml @@ -37,6 +37,7 @@ com.apple.eawt com.apple.eawt.event com.apple.eio + com.dd.plist com.github.lgooddatepicker.components com.github.lgooddatepicker.optionalusertools com.github.lgooddatepicker.zinternaltools From 13b0fe2c74d03889f87603522f1f6c4e8a93dc90 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Fri, 22 Feb 2019 16:10:58 -0500 Subject: [PATCH 22/80] 4757 changes to allow results by 
case search to perform quicker and hide duplicate results --- .../CommonAttributeCaseSearchResults.java | 4 +- .../CommonAttributeCountSearchResults.java | 4 +- .../CommonAttributeValueList.java | 10 +++ .../InstanceDataSourceNode.java | 2 +- .../InterCaseSearchResultsProcessor.java | 69 +++++++++++-------- 5 files changed, 54 insertions(+), 35 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java index 0b61923044..1bd94f4a8a 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java @@ -173,7 +173,7 @@ final public class CommonAttributeCaseSearchResults { Map valuesToKeep = new HashMap<>(); Set valuesToRemove = new HashSet<>(); for (Entry mapOfValueLists : Collections.unmodifiableMap(dataSourceToValueList).entrySet()) { - for (CommonAttributeValue value : mapOfValueLists.getValue().getDelayedMetadataList()) { + for (CommonAttributeValue value : mapOfValueLists.getValue().getDelayedMetadataSet()) { if (valuesToRemove.contains(value.getValue())) { //do nothing this value will not be added } else if (filterValue(attributeType, value, maximumPercentageThreshold, uniqueCaseDataSourceTuples, mimeTypesToFilterOn)) { @@ -202,7 +202,7 @@ final public class CommonAttributeCaseSearchResults { private Map createTreeForCase(Map valuesToKeepCurrentCase, Map dataSourceToValueList) throws EamDbException { Map treeForCase = new HashMap<>(); for (Entry mapOfValueLists : Collections.unmodifiableMap(dataSourceToValueList).entrySet()) { - for (CommonAttributeValue value : mapOfValueLists.getValue().getDelayedMetadataList()) { + for (CommonAttributeValue value : mapOfValueLists.getValue().getDelayedMetadataSet()) { if (valuesToKeepCurrentCase.containsKey(value.getValue())) { if 
(!treeForCase.containsKey(mapOfValueLists.getKey())) { treeForCase.put(mapOfValueLists.getKey(), new CommonAttributeValueList()); diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java index 20751264b7..15de0dd09a 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java @@ -152,7 +152,7 @@ final public class CommonAttributeCountSearchResults { final Integer key = listOfValues.getKey(); final CommonAttributeValueList values = listOfValues.getValue(); - for (CommonAttributeValue value : values.getDelayedMetadataList()) { // Need the real metadata + for (CommonAttributeValue value : values.getDelayedMetadataSet()) { // Need the real metadata //Intracase common attribute searches will have been created with an empty mimeTypesToInclude list //because when performing intra case search this filtering will have been done during the query of the case database @@ -209,7 +209,7 @@ final public class CommonAttributeCountSearchResults { final CommonAttributeValueList instanceCountValue = this.instanceCountToAttributeValues.get(key); if (instanceCountValue != null) { instanceCountValue.removeMetaData(value); - if (instanceCountValue.getDelayedMetadataList().isEmpty()) { // Check the real metadata + if (instanceCountValue.getDelayedMetadataSet().isEmpty()) { // Check the real metadata this.instanceCountToAttributeValues.remove(key); } } diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java index 513196ed98..50f29dff13 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java +++ 
b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java @@ -21,7 +21,9 @@ package org.sleuthkit.autopsy.commonpropertiessearch; import java.util.ArrayList; import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Set; /** * Utility and wrapper model around data required for Common Files Search @@ -67,6 +69,10 @@ final public class CommonAttributeValueList { return Collections.unmodifiableList(this.metadataList); } + public Set getMetadataSet() { + return new HashSet<>(this.metadataList); + } + /** * Get the delayed list of value nodes. Only use for * determining how many CommonAttributeValues @@ -77,6 +83,10 @@ final public class CommonAttributeValueList { return Collections.unmodifiableList(this.delayedMetadataList); } + Set getDelayedMetadataSet() { + return new HashSet<>(this.delayedMetadataList); + } + void removeMetaData(CommonAttributeValue commonVal) { this.delayedMetadataList.remove(commonVal); } diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java index 8de44c415e..28c08e3c78 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java @@ -123,7 +123,7 @@ public final class InstanceDataSourceNode extends DisplayableItemNode { @Override protected boolean createKeys(List list) { - for (CommonAttributeValue value : descendants.getDelayedMetadataList()) { + for (CommonAttributeValue value : descendants.getDelayedMetadataSet()) { // This is a bit of a hack to ensure that the AbstractFile instance // has been created before createNodesForKey() is called. Constructing // the AbstractFile in createNodesForKey() was resulting in UI lockups. 
diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 2c54ad65a4..788938df5b 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -22,8 +22,12 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; +import java.util.List; import java.util.Map; import java.util.logging.Level; +import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type; @@ -76,15 +80,7 @@ final class InterCaseSearchResultsProcessor { } private String getInterCaseWhereClause() { - String tableName = EamDbUtil.correlationTypeToInstanceTableName(correlationType); - StringBuilder sqlString = new StringBuilder(250); - sqlString.append("value IN (SELECT value FROM ") - .append(tableName) - .append(" WHERE value IN (SELECT value FROM ") - .append(tableName) - .append(" WHERE case_id=%s AND (known_status !=%s OR known_status IS NULL) GROUP BY value)") - .append(" GROUP BY value HAVING COUNT(DISTINCT case_id) > 1) ORDER BY value"); - return sqlString.toString(); + return "case_id=%s AND (known_status !=%s OR known_status IS NULL)"; } private String getSingleInterCaseWhereClause() { @@ -322,36 +318,49 @@ final class InterCaseSearchResultsProcessor { @Override public void process(ResultSet resultSet) { try { + Set values = new HashSet<>(); while (resultSet.next()) { - int resultId = InstanceTableCallback.getId(resultSet); String corValue = InstanceTableCallback.getValue(resultSet); if (corValue == null || 
HashUtility.isNoDataMd5(corValue)) { continue; } - CorrelationCase correlationCase = EamDb.getInstance().getCaseById(InstanceTableCallback.getCaseId(resultSet)); - String caseName = correlationCase.getDisplayName(); - CorrelationDataSource correlationDatasource = EamDb.getInstance().getDataSourceById(correlationCase, InstanceTableCallback.getDataSourceId(resultSet)); - //label datasource with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class - String dataSourceNameKey = correlationDatasource.getName() + " (Id: " + correlationDatasource.getDataSourceObjectID() + ")"; - if (!caseCollatedDataSourceCollections.containsKey(caseName)) { - caseCollatedDataSourceCollections.put(caseName, new HashMap()); + values.add(corValue); + } + for (String corValue : values){ + //select * from _instances where value=corValue && case_id!=caseId + List instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + + if (instances.size() > 1) { + for (CorrelationAttributeInstance instance : instances) { + CorrelationCase correlationCase = instance.getCorrelationCase(); + String caseName = correlationCase.getDisplayName(); + CorrelationDataSource correlationDatasource = instance.getCorrelationDataSource(); + //label datasource with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class + String dataSourceNameKey = correlationDatasource.getName() + " (Id: " + correlationDatasource.getDataSourceObjectID() + ")"; + if (!caseCollatedDataSourceCollections.containsKey(caseName)) { + caseCollatedDataSourceCollections.put(caseName, new HashMap()); + } + Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); + if (!dataSourceToFile.containsKey(dataSourceNameKey)) { + dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); + } + CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); + CentralRepoCommonAttributeInstance searchResult = 
new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); + searchResult.setCurrentAttributeInst(instance); + CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); + commonAttributeValue.addInstance(searchResult); + valueList.addMetadataToList(commonAttributeValue); + dataSourceToFile.put(dataSourceNameKey, valueList); + caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); + } +// } } - Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); - if (!dataSourceToFile.containsKey(dataSourceNameKey)) { - dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); - } - CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); - CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(resultId, correlationType, NODE_TYPE.CASE_NODE); - CorrelationAttributeInstance corrAttr = findSingleCorrelationAttribute(resultId); - searchResult.setCurrentAttributeInst(corrAttr); - CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); - commonAttributeValue.addInstance(searchResult); - valueList.addMetadataToList(commonAttributeValue); - dataSourceToFile.put(dataSourceNameKey, valueList); - caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); + } } catch (EamDbException | SQLException ex) { LOGGER.log(Level.WARNING, "Error getting artifact instances from database.", ex); // NON-NLS + } catch (CorrelationAttributeNormalizationException ex) { + Exceptions.printStackTrace(ex); } } From d606d681896370b5fa9986dc7e6f2ce517ee2a41 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Fri, 22 Feb 2019 17:16:37 -0500 Subject: [PATCH 23/80] 4757 fix by count query results to be quicker --- .../InterCaseSearchResultsProcessor.java | 129 +++++++----------- 1 file changed, 51 insertions(+), 78 deletions(-) diff --git 
a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 788938df5b..794d57ffec 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -231,77 +231,53 @@ final class InterCaseSearchResultsProcessor { final Map instanceCollatedCommonFiles = new HashMap<>(); - private CommonAttributeValue commonAttributeValue = null; - private String previousRowMd5 = ""; - @Override public void process(ResultSet resultSet) { try { + Set values = new HashSet<>(); + Integer caseID = null; while (resultSet.next()) { - - int resultId = InstanceTableCallback.getId(resultSet); - String corValue = InstanceTableCallback.getValue(resultSet); - if (previousRowMd5.isEmpty()) { - previousRowMd5 = corValue; + if (caseID == null) { + caseID = InstanceTableCallback.getCaseId(resultSet); } + String corValue = InstanceTableCallback.getValue(resultSet); if (corValue == null || HashUtility.isNoDataMd5(corValue)) { continue; } - - countAndAddCommonAttributes(corValue, resultId); - + values.add(corValue); } - //Add the final instance(s) - if (commonAttributeValue != null) { - int size = commonAttributeValue.getInstanceCount(); - if (instanceCollatedCommonFiles.containsKey(size)) { - instanceCollatedCommonFiles.get(size).addMetadataToList(commonAttributeValue); - } else { - CommonAttributeValueList value = new CommonAttributeValueList(); - value.addMetadataToList(commonAttributeValue); - instanceCollatedCommonFiles.put(size, value); + for (String corValue : values) { + List instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + int size = instances.size(); + if (size > 1) { + CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); + boolean anotherCase = false; + 
for (CorrelationAttributeInstance instance : instances) { + CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.COUNT_NODE); + searchResult.setCurrentAttributeInst(instance); + commonAttributeValue.addInstance(searchResult); + anotherCase = anotherCase || instance.getCorrelationCase().getID() != caseID; + } + if (anotherCase) { + if (instanceCollatedCommonFiles.containsKey(size)) { + instanceCollatedCommonFiles.get(size).addMetadataToList(commonAttributeValue); + } else { + CommonAttributeValueList value = new CommonAttributeValueList(); + value.addMetadataToList(commonAttributeValue); + instanceCollatedCommonFiles.put(size, value); + } + } } } } catch (SQLException ex) { LOGGER.log(Level.WARNING, "Error getting artifact instances from database.", ex); // NON-NLS + } catch (EamDbException ex) { + Exceptions.printStackTrace(ex); + } catch (CorrelationAttributeNormalizationException ex) { + Exceptions.printStackTrace(ex); } } - /** - * Add a resultId to the list of matches for a given corValue, which - * counts to number of instances of that match, determining which - * InstanceCountNode the match will be added to. - * - * @param corValue the value which matches - * @param resultId the CorrelationAttributeInstance id to be retrieved - * later. 
- */ - private void countAndAddCommonAttributes(String corValue, int resultId) { - if (commonAttributeValue == null) { - commonAttributeValue = new CommonAttributeValue(corValue); - } - if (!corValue.equals(previousRowMd5)) { - int size = commonAttributeValue.getInstanceCount(); - if (instanceCollatedCommonFiles.containsKey(size)) { - instanceCollatedCommonFiles.get(size).addMetadataToList(commonAttributeValue); - } else { - CommonAttributeValueList value = new CommonAttributeValueList(); - value.addMetadataToList(commonAttributeValue); - instanceCollatedCommonFiles.put(size, value); - } - - commonAttributeValue = new CommonAttributeValue(corValue); - previousRowMd5 = corValue; - } - // we don't *have* all the information for the rows in the CR, - // so we need to consult the present case via the SleuthkitCase object - // Later, when the FileInstanceNode is built. Therefore, build node generators for now. - CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(resultId, correlationType, NODE_TYPE.COUNT_NODE); - CorrelationAttributeInstance corrAttr = findSingleCorrelationAttribute(resultId); - searchResult.setCurrentAttributeInst(corrAttr); - commonAttributeValue.addInstance(searchResult); - } - Map getInstanceCollatedCommonFiles() { return Collections.unmodifiableMap(instanceCollatedCommonFiles); } @@ -326,34 +302,31 @@ final class InterCaseSearchResultsProcessor { } values.add(corValue); } - for (String corValue : values){ - //select * from _instances where value=corValue && case_id!=caseId + for (String corValue : values) { List instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); - if (instances.size() > 1) { for (CorrelationAttributeInstance instance : instances) { CorrelationCase correlationCase = instance.getCorrelationCase(); - String caseName = correlationCase.getDisplayName(); - CorrelationDataSource correlationDatasource = instance.getCorrelationDataSource(); - //label datasource 
with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class - String dataSourceNameKey = correlationDatasource.getName() + " (Id: " + correlationDatasource.getDataSourceObjectID() + ")"; - if (!caseCollatedDataSourceCollections.containsKey(caseName)) { - caseCollatedDataSourceCollections.put(caseName, new HashMap()); - } - Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); - if (!dataSourceToFile.containsKey(dataSourceNameKey)) { - dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); - } - CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); - CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); - searchResult.setCurrentAttributeInst(instance); - CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); - commonAttributeValue.addInstance(searchResult); - valueList.addMetadataToList(commonAttributeValue); - dataSourceToFile.put(dataSourceNameKey, valueList); - caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); + String caseName = correlationCase.getDisplayName(); + CorrelationDataSource correlationDatasource = instance.getCorrelationDataSource(); + //label datasource with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class + String dataSourceNameKey = correlationDatasource.getName() + " (Id: " + correlationDatasource.getDataSourceObjectID() + ")"; + if (!caseCollatedDataSourceCollections.containsKey(caseName)) { + caseCollatedDataSourceCollections.put(caseName, new HashMap()); } -// } + Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); + if (!dataSourceToFile.containsKey(dataSourceNameKey)) { + dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); + } + CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); + 
CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); + searchResult.setCurrentAttributeInst(instance); + CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); + commonAttributeValue.addInstance(searchResult); + valueList.addMetadataToList(commonAttributeValue); + dataSourceToFile.put(dataSourceNameKey, valueList); + caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); + } } } From b2ca49a3d1b97e6f45977c73b9c1ebc604a3e666 Mon Sep 17 00:00:00 2001 From: Raman Date: Mon, 25 Feb 2019 09:15:50 -0500 Subject: [PATCH 24/80] Address Codacy comments and review comments on previous commit. --- .../autopsy/recentactivity/Bundle.properties | 1 - .../recentactivity/ChromeCacheExtractor.java | 47 ++++++++++--------- 2 files changed, 25 insertions(+), 23 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties index 30e2189452..1750a4287c 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties @@ -24,7 +24,6 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f Chrome.getLogin.errMsg.errAnalyzingFiles={0}\: Error while trying to analyze file\:{1} Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files. Chrome.getAutofill.errMsg.errAnalyzingFiles={0}\: Error while trying to analyze file\:{1} -ChromeCacheExtractor.moduleName=ChromeCacheExtractor Extract.dbConn.errMsg.failedToQueryDb={0}\: Failed to query database. ExtractIE.moduleName.text=Internet Explorer ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}\: Error getting Internet Explorer Bookmarks. 
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 0df15e48c2..8928dc6c7b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -28,9 +28,9 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.nio.charset.Charset; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -95,9 +95,8 @@ final class ChromeCacheExtractor { private final IngestServices services = IngestServices.getInstance(); private Case currentCase; private FileManager fileManager; - - - private Map filesTable = new HashMap<>(); + + private final Map filesTable = new HashMap<>(); /** * Encapsulates abstract file for a cache file as well as a temp file copy @@ -126,6 +125,9 @@ final class ChromeCacheExtractor { } } + @NbBundle.Messages({ + "ChromeCacheExtractor.moduleName=ChromeCacheExtractor" + }) ChromeCacheExtractor(Content dataSource, IngestJobContext context ) { moduleName = NbBundle.getMessage(ChromeCacheExtractor.class, "ChromeCacheExtractor.moduleName"); this.dataSource = dataSource; @@ -146,7 +148,8 @@ final class ChromeCacheExtractor { // Create an output folder to save any derived files absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName); - relOutputFolderName = RAImageIngestModule.getRelModuleOutputPath() + File.separator + moduleName; + relOutputFolderName = Paths.get( RAImageIngestModule.getRelModuleOutputPath(), moduleName).normalize().toString(); + File dir = new File(absOutputFolderName); if (dir.exists() == false) { dir.mkdirs(); @@ -450,10 +453,7 @@ final class ChromeCacheExtractor { * @throws 
TskCoreException */ List findCacheFiles(String cacheFileName) throws TskCoreException { - - List cacheFiles = fileManager.findFiles(dataSource, cacheFileName, DEFAULT_CACHE_STR); //NON-NLS - - return cacheFiles; + return fileManager.findFiles(dataSource, cacheFileName, DEFAULT_CACHE_STR); //NON-NLS } @@ -875,7 +875,7 @@ final class ChromeCacheExtractor { if (hdrNum == 1) { httpResponse = headerLine; } else { - int nPos = headerLine.indexOf(":"); + int nPos = headerLine.indexOf(':'); String key = headerLine.substring(0, nPos); String val= headerLine.substring(nPos+1); @@ -975,8 +975,9 @@ final class ChromeCacheExtractor { if (hasHTTPHeaders()) { String str = getHTTPHeader("content-encoding"); - if (str!=null) + if (str != null) { strBuilder.append(String.format("\t%s=%s", "content-encoding", str )); + } } return strBuilder.toString(); @@ -1050,12 +1051,10 @@ final class ChromeCacheExtractor { private final int dataSizes[]; private final CacheAddress dataAddresses[]; - ArrayList dataList = null; + private List dataList; private final long flags; - private final int pad[] = new int[4]; - - private final long selfHash; // hash of the entry itself so far. + private String key; // Key may be found within the entry or may be external CacheEntry(CacheAddress cacheAdress, CacheFileCopy cacheFileCopy ) { @@ -1088,6 +1087,7 @@ final class ChromeCacheExtractor { uint32 = fileROBuf.getInt() & UINT32_MASK; longKeyAddresses = (uint32 != 0) ? 
new CacheAddress(uint32, selfAddress.getCachePath()) : null; + dataList = null; dataSizes= new int[4]; for (int i = 0; i < 4; i++) { dataSizes[i] = fileROBuf.getInt(); @@ -1098,11 +1098,13 @@ final class ChromeCacheExtractor { } flags = fileROBuf.getInt() & UINT32_MASK; + // skip over pad for (int i = 0; i < 4; i++) { - pad[i] = fileROBuf.getInt(); + fileROBuf.getInt(); } - selfHash = fileROBuf.getInt() & UINT32_MASK; + // skip over self hash + fileROBuf.getInt(); // get the key if (longKeyAddresses != null) { @@ -1116,13 +1118,14 @@ final class ChromeCacheExtractor { } else { // key stored within entry StringBuilder strBuilder = new StringBuilder(MAX_KEY_LEN); - int i = 0; - while (fileROBuf.remaining() > 0 && i < MAX_KEY_LEN) { - char c = (char)fileROBuf.get(); - if (c == '\0') { + int keyLen = 0; + while (fileROBuf.remaining() > 0 && keyLen < MAX_KEY_LEN) { + char keyChar = (char)fileROBuf.get(); + if (keyChar == '\0') { break; } - strBuilder.append(c); + strBuilder.append(keyChar); + keyLen++; } key = strBuilder.toString(); From 3397c4b22d29827261f27fe391176a2962ddc28f Mon Sep 17 00:00:00 2001 From: Raman Date: Mon, 25 Feb 2019 09:59:29 -0500 Subject: [PATCH 25/80] Change log level for when unable to delete a temp copy of cache file. 
--- .../sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 8928dc6c7b..a4563352c8 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -203,7 +203,7 @@ final class ChromeCacheExtractor { tmpFile.deleteOnExit(); } } catch (IOException ex) { - logger.log(Level.SEVERE, String.format("Failed to delete cache file copy %s", tempFilePathname), ex); + logger.log(Level.WARNING, String.format("Failed to delete cache file copy %s", tempFilePathname), ex); } } } From 1e1e01098082d63cf6aade95eeeb33994626e10c Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 25 Feb 2019 10:05:17 -0500 Subject: [PATCH 26/80] Made changes based on review comments, including the addition of NON-NLS and changing hashtable to hashmap --- .../autopsy/recentactivity/Extract.java | 12 +- .../autopsy/recentactivity/ExtractEdge.java | 156 +++++++++--------- 2 files changed, 90 insertions(+), 78 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index 466fbb6997..7ab22b4850 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -201,11 +201,19 @@ abstract class Extract { return moduleName; } + /** + * Returns the state of foundData + * @return + */ public boolean foundData() { return dataFound; } - protected void setFoundData(boolean b){ - dataFound = b; + /** + * Sets the value of foundData + * @param foundData + */ + protected void setFoundData(boolean foundData){ + dataFound = foundData; } } diff --git 
a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 8acfce9f87..2264a83850 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -29,7 +29,7 @@ import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Hashtable; +import java.util.HashMap; import java.util.List; import java.util.Scanner; import java.util.logging.Level; @@ -62,42 +62,42 @@ final class ExtractEdge extends Extract { private final Path moduleTempResultPath; private Content dataSource; private IngestJobContext context; - private Hashtable> containersTable; + private HashMap> containersTable; - private static final String EDGE = "Edge"; + private static final String EDGE = "Edge"; //NON-NLS - private static final String EDGE_KEYWORD_VISIT = "Visited:"; - private static final String IGNORE_COMMA_IN_QUOTES_REGEX = ",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"; + private static final String EDGE_KEYWORD_VISIT = "Visited:"; //NON-NLS + private static final String IGNORE_COMMA_IN_QUOTES_REGEX = ",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"; //NON-NLS - private static final String EDGE_TABLE_TYPE_DOWNLOAD = "iedownload"; - private static final String EDGE_TABLE_TYPE_HISTORY = "History"; - private static final String EDGE_TABLE_TYPE_COOKIE = "cookie"; + private static final String EDGE_TABLE_TYPE_DOWNLOAD = "iedownload"; //NON-NLS + private static final String EDGE_TABLE_TYPE_HISTORY = "History"; //NON-NLS + private static final String EDGE_TABLE_TYPE_COOKIE = "cookie"; //NON-NLS - private static final String EDGE_HEAD_URL = "url"; - private static final String EDGE_HEAD_ACCESSTIME = "accessedtime"; - private static final String EDGE_HEAD_NAME = "name"; - private static final String EDGE_HEAD_CONTAINER_ID = "containerid"; - private static 
final String EDGE_HEAD_RESPONSEHEAD = "responseheaders"; - private static final String EDGE_HEAD_TITLE = "title"; - private static final String EDGE_HEAD_RDOMAIN = "rdomain"; - private static final String EDGE_HEAD_VALUE = "value"; - private static final String EDGE_HEAD_LASTMOD = "lastmodified"; + private static final String EDGE_HEAD_URL = "url"; //NON-NLS + private static final String EDGE_HEAD_ACCESSTIME = "accessedtime"; //NON-NLS + private static final String EDGE_HEAD_NAME = "name"; //NON-NLS + private static final String EDGE_HEAD_CONTAINER_ID = "containerid"; //NON-NLS + private static final String EDGE_HEAD_RESPONSEHEAD = "responseheaders"; //NON-NLS + private static final String EDGE_HEAD_TITLE = "title"; //NON-NLS + private static final String EDGE_HEAD_RDOMAIN = "rdomain"; //NON-NLS + private static final String EDGE_HEAD_VALUE = "value"; //NON-NLS + private static final String EDGE_HEAD_LASTMOD = "lastmodified"; //NON-NLS - private static final String EDGE_WEBCACHE_PREFIX = "WebCacheV01"; - private static final String EDGE_CONTAINER_FILE_PREFIX = "Container_"; - private static final String EDGE_CONTAINER_FILE_EXT = ".csv"; - private static final String EDGE_WEBCACHE_EXT = ".dat"; + private static final String EDGE_WEBCACHE_PREFIX = "WebCacheV01"; //NON-NLS + private static final String EDGE_CONTAINER_FILE_PREFIX = "Container_"; //NON-NLS + private static final String EDGE_CONTAINER_FILE_EXT = ".csv"; //NON-NLS + private static final String EDGE_WEBCACHE_EXT = ".dat"; //NON-NLS - private static final String ESE_TOOL_NAME = "ESEDatabaseView.exe"; - private static final String EDGE_WEBCACHE_NAME = "WebCacheV01.dat"; - private static final String EDGE_SPARTAN_NAME = "Spartan.edb"; - private static final String EDGE_CONTAINTERS_FILE_NAME = "Containers.csv"; - private static final String EDGE_FAVORITE_FILE_NAME = "Favorites.csv"; - private static final String EDGE_OUTPUT_FILE_NAME = "Output.txt"; - private static final String EDGE_ERROR_FILE_NAME = 
"File.txt"; + private static final String ESE_TOOL_NAME = "ESEDatabaseView.exe"; //NON-NLS + private static final String EDGE_WEBCACHE_NAME = "WebCacheV01.dat"; //NON-NLS + private static final String EDGE_SPARTAN_NAME = "Spartan.edb"; //NON-NLS + private static final String EDGE_CONTAINTERS_FILE_NAME = "Containers.csv"; //NON-NLS + private static final String EDGE_FAVORITE_FILE_NAME = "Favorites.csv"; //NON-NLS + private static final String EDGE_OUTPUT_FILE_NAME = "Output.txt"; //NON-NLS + private static final String EDGE_ERROR_FILE_NAME = "File.txt"; //NON-NLS - private static final String ESE_TOOL_FOLDER = "ESEDatabaseView"; - private static final String EDGE_RESULT_FOLDER_NAME = "results"; + private static final String ESE_TOOL_FOLDER = "ESEDatabaseView"; //NON-NLS + private static final String EDGE_RESULT_FOLDER_NAME = "results"; //NON-NLS private static final SimpleDateFormat DATE_FORMATTER = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); @@ -107,8 +107,12 @@ final class ExtractEdge extends Extract { "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file", "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file", "ExtractEdge_Module_Name=Microsoft Edge", - "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history",}) + "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history", + }) + /** + * Extract the bookmarks, cookies, downloads and history from Microsoft Edge + */ ExtractEdge() throws NoCurrentCaseException { moduleTempResultPath = Paths.get(RAImageIngestModule.getRATempPath(Case.getCurrentCaseThrows(), EDGE), EDGE_RESULT_FOLDER_NAME); } @@ -222,7 +226,7 @@ final class ExtractEdge extends Extract { return; } - this.getCookie(webCacheFile, resultsDir); + this.getCookies(webCacheFile, resultsDir); // if (context.dataSourceIngestIsCancelled()) { // return; @@ -255,7 +259,7 @@ final class ExtractEdge extends Extract { 
//Run the dumper String tempSpartanFileName = EDGE_WEBCACHE_PREFIX - + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT; //NON-NLS + + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT; File tempSpartanFile = new File(RAImageIngestModule.getRATempPath(currentCase, EDGE), tempSpartanFileName); try { @@ -275,7 +279,7 @@ final class ExtractEdge extends Extract { return; } - this.getBookmark(spartanFile, resultsDir); + this.getBookmarks(spartanFile, resultsDir); } finally { tempSpartanFile.delete(); @@ -321,10 +325,10 @@ final class ExtractEdge extends Extract { } if (line.contains(EDGE_KEYWORD_VISIT)) { - BlackboardArtifact b = getHistoryArtifact(origFile, headers, line); - if (b != null) { - bbartifacts.add(b); - this.indexArtifact(b); + BlackboardArtifact ba = getHistoryArtifact(origFile, headers, line); + if (ba != null) { + bbartifacts.add(ba); + this.indexArtifact(ba); } } } @@ -348,14 +352,14 @@ final class ExtractEdge extends Extract { * @throws TskCoreException * @throws FileNotFoundException */ - private void getBookmark(AbstractFile origFile, File resultDir) throws TskCoreException { + private void getBookmarks(AbstractFile origFile, File resultDir) throws TskCoreException { Scanner fileScanner; File favoriteFile = new File(resultDir, EDGE_FAVORITE_FILE_NAME); try { fileScanner = new Scanner(new FileInputStream(favoriteFile)); } catch (FileNotFoundException ex) { - // This is a non-fatial error, if the favorites file is not found + // This is a non-fatal error, if the favorites file is not found // there might have not been any favorites\bookmarks return; } @@ -371,10 +375,10 @@ final class ExtractEdge extends Extract { continue; } - BlackboardArtifact b = getBookmarkArtifact(origFile, headers, line); - if (b != null) { - bbartifacts.add(b); - this.indexArtifact(b); + BlackboardArtifact ba = getBookmarkArtifact(origFile, headers, line); + if (ba != null) { + bbartifacts.add(ba); + this.indexArtifact(ba); } } } finally { @@ -395,7 
+399,7 @@ final class ExtractEdge extends Extract { * @param resultDir Output directory of ESEDatabaseViewer * @throws TskCoreException */ - private void getCookie(AbstractFile origFile, File resultDir) throws TskCoreException { + private void getCookies(AbstractFile origFile, File resultDir) throws TskCoreException { File containerFiles[] = resultDir.listFiles((dir, name) -> name.toLowerCase().contains(EDGE_TABLE_TYPE_COOKIE)); if (containerFiles == null) { @@ -422,10 +426,10 @@ final class ExtractEdge extends Extract { continue; } - BlackboardArtifact b = getCookieArtifact(origFile, headers, line); - if (b != null) { - bbartifacts.add(b); - this.indexArtifact(b); + BlackboardArtifact ba = getCookieArtifact(origFile, headers, line); + if (ba != null) { + bbartifacts.add(ba); + this.indexArtifact(ba); } } } finally { @@ -448,7 +452,7 @@ final class ExtractEdge extends Extract { * @throws TskCoreException * @throws FileNotFoundException */ - private void getDownload(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException { + private void getDownloads(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException { ArrayList downloadFiles = getDownloadFiles(resultDir); if (downloadFiles == null) { @@ -476,10 +480,10 @@ final class ExtractEdge extends Extract { if (line.contains(EDGE_TABLE_TYPE_DOWNLOAD)) { - BlackboardArtifact b = getDownloadArtifact(origFile, headers, line); - if (b != null) { - bbartifacts.add(b); - this.indexArtifact(b); + BlackboardArtifact ba = getDownloadArtifact(origFile, headers, line); + if (ba != null) { + bbartifacts.add(ba); + this.indexArtifact(ba); } } } @@ -498,7 +502,7 @@ final class ExtractEdge extends Extract { /** * Find the location of ESEDatabaseViewer.exe * - * @return Absolute path to ESEDatabaseViewer.exe + * @return Absolute path to ESEDatabaseViewer.exe or null if the file is not found */ private String getPathForESEDumper() { Path path = Paths.get(ESE_TOOL_FOLDER, 
ESE_TOOL_NAME); @@ -514,25 +518,25 @@ final class ExtractEdge extends Extract { /** * Finds all of the WebCacheV01.dat files in the case * - * @return A list of WebCacheV01.dat files + * @return A list of WebCacheV01.dat files, possibly empty if none are found * @throws TskCoreException */ private List fetchWebCacheDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME, "WebCache"); + return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME, "WebCache"); //NON-NLS } /** * Finds all of the spartan.edb files in the case * - * @return A list of spartan files + * @return A list of spartan files, possibly empty if none are found * @throws TskCoreException */ private List fetchSpartanDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME, "MicrosoftEdge"); + return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME, "MicrosoftEdge"); //NON-NLS } /** @@ -555,11 +559,11 @@ final class ExtractEdge extends Extract { List commandLine = new ArrayList<>(); commandLine.add(dumperPath); - commandLine.add("/table"); + commandLine.add("/table"); //NON-NLS commandLine.add(inputFilePath); - commandLine.add("*"); - commandLine.add("/scomma"); - commandLine.add(outputDir + "\\" + "*.csv"); + commandLine.add("*"); //NON-NLS + commandLine.add("/scomma"); //NON-NLS + commandLine.add(outputDir + "\\" + "*.csv"); //NON-NLS ProcessBuilder processBuilder = new ProcessBuilder(commandLine); processBuilder.redirectOutput(outputFilePath.toFile()); @@ -675,7 +679,7 @@ final class ExtractEdge extends Extract { * @param origFile File the table came from ie spartan.edb * @param headers List of table column headers * @param line The line or row of the table to parse - * @return 
BlackboardArtifact representation of the passed in line\table row + * @return BlackboardArtifact representation of the passed in line\table row or null if no Bookmark is found * @throws TskCoreException */ private BlackboardArtifact getBookmarkArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { @@ -872,17 +876,17 @@ final class ExtractEdge extends Extract { * Converts a space separated string of hex values to ascii characters. * * @param hexString - * @return "decoded" string + * @return "decoded" string or null if a non-hex value was found */ private String hexToChar(String hexString) { String[] hexValues = hexString.split(" "); StringBuilder output = new StringBuilder(); - for (String s : hexValues) { + for (String str : hexValues) { try { - int i = Integer.parseInt(s, 16); - if (i > 31) { // Ignore non-print characters - output.append((char) i); + int value = Integer.parseInt(str, 16); + if (value > 31) { // Ignore non-print characters + output.append((char) value); } } catch (NumberFormatException ex) { return null; @@ -953,11 +957,11 @@ final class ExtractEdge extends Extract { * * @param resultDir Path to ESEDatabaseViewer output * @param type Type of table files - * @return List of table files + * @return List of table files returns null if no files of that type are found * @throws FileNotFoundException */ private ArrayList getContainerFiles(File resultDir, String type) throws FileNotFoundException { - Hashtable> idTable = getContainerIDTable(resultDir); + HashMap> idTable = getContainerIDTable(resultDir); ArrayList idList = idTable.get(type); if (idList == null) { @@ -965,8 +969,8 @@ final class ExtractEdge extends Extract { } ArrayList fileList = new ArrayList<>(); - for (String s : idList) { - String fileName = EDGE_CONTAINER_FILE_PREFIX + s + EDGE_CONTAINER_FILE_EXT; + for (String str : idList) { + String fileName = EDGE_CONTAINER_FILE_PREFIX + str + EDGE_CONTAINER_FILE_EXT; fileList.add(new File(resultDir, fileName)); } 
@@ -981,16 +985,16 @@ final class ExtractEdge extends Extract { * files. * * @param resultDir Path to ESEDatabaseViewer output - * @return Hashtable with Key representing the table type, the value is a list of table ids for that type + * @return Hashmap with Key representing the table type, the value is a list of table ids for that type */ - private Hashtable> getContainerIDTable(File resultDir) throws FileNotFoundException { + private HashMap> getContainerIDTable(File resultDir) throws FileNotFoundException { if (containersTable == null) { File containerFile = new File(resultDir, EDGE_CONTAINTERS_FILE_NAME); try (Scanner fileScanner = new Scanner(new FileInputStream(containerFile))) { List headers = null; - containersTable = new Hashtable<>(); + containersTable = new HashMap<>(); int nameIdx = 0; int idIdx = 0; while (fileScanner.hasNext()) { From 57651d219d11272e75034a652710d4f279e83e94 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 25 Feb 2019 12:06:50 -0500 Subject: [PATCH 27/80] added function createTemportyFile to Extract for reuse by all subclasses, cleaned up ExtractSafari a bit adding comments --- .../autopsy/recentactivity/Extract.java | 26 +++++++++++++- .../autopsy/recentactivity/ExtractSafari.java | 34 ++++++++++--------- 2 files changed, 43 insertions(+), 17 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index f6e7eb8436..48a262f89c 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -22,6 +22,9 @@ */ package org.sleuthkit.autopsy.recentactivity; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; @@ -35,6 +38,7 @@ import org.sleuthkit.autopsy.casemodule.services.Blackboard; import 
org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect; +import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.datamodel.*; @@ -397,5 +401,25 @@ abstract class Extract { return bbattributes; } - + /** + * Create temporary file for the given AbstractFile + * + * @param context + * @param file + * @return + * @throws IOException + */ + protected java.io.File createTemporaryFile(IngestJobContext context, AbstractFile file) throws IOException{ + Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath( + getCurrentCase(), getName()), file.getName() + file.getId() + file.getNameExtension()); + java.io.File tempFile = tempFilePath.toFile(); + + try { + ContentUtils.writeToFile(file, tempFile, context::dataSourceIngestIsCancelled); + } catch (IOException ex) { + throw new IOException("Error writingToFile: " + file, ex); //NON-NLS + } + + return tempFile; + } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index b341b79c6f..8bd52a1f2d 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -49,14 +49,13 @@ final class ExtractSafari extends Extract { private final IngestServices services = IngestServices.getInstance(); // visit_time uses an epoch of Jan 1, 2001 thus the addition of 978307200 - private static final String SAFARI_HISTORY_QUERY = "SELECT url, title, visit_time + 978307200 as time FROM 'history_items' JOIN history_visits ON history_item = history_items.id;"; + private static final String HISTORY_QUERY = "SELECT url, title, visit_time + 978307200 as time FROM 'history_items' JOIN 
history_visits ON history_item = history_items.id;"; //NON-NLS - private static final String SAFARI_HISTORY_FILE_NAME = "History.db"; - private static final String SAFARI_DATABASE_EXT = ".db"; + private static final String HISTORY_FILE_NAME = "History.db"; //NON-NLS - private static final String SAFARI_HEAD_URL = "url"; - private static final String SAFARI_HEAD_TITLE = "title"; - private static final String SAFARI_HEAD_TIME = "time"; + private static final String HEAD_URL = "url"; //NON-NLS + private static final String HEAD_TITLE = "title"; //NON-NLS + private static final String HEAD_TIME = "time"; //NON-NLS private final Logger logger = Logger.getLogger(this.getClass().getName()); @@ -65,6 +64,10 @@ final class ExtractSafari extends Extract { "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files." }) + /** + * Extract the bookmarks, cookies, downloads and history from Safari + * + */ ExtractSafari() { } @@ -96,7 +99,7 @@ final class ExtractSafari extends Extract { private void processHistoryDB(Content dataSource, IngestJobContext context) throws TskCoreException, IOException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); - List historyFiles = fileManager.findFiles(dataSource, SAFARI_HISTORY_FILE_NAME); + List historyFiles = fileManager.findFiles(dataSource, HISTORY_FILE_NAME); if (historyFiles == null || historyFiles.isEmpty()) { return; @@ -126,9 +129,7 @@ final class ExtractSafari extends Extract { return; } - Path tempHistoryPath = Paths.get(RAImageIngestModule.getRATempPath( - getCurrentCase(), getName()), historyFile.getName() + historyFile.getId() + SAFARI_DATABASE_EXT); - File tempHistoryFile = tempHistoryPath.toFile(); + File tempHistoryFile = this.createTemporaryFile(context, historyFile); try { ContentUtils.writeToFile(historyFile, tempHistoryFile, context::dataSourceIngestIsCancelled); @@ -137,7 +138,7 @@ final class ExtractSafari extends Extract { } try { - Collection bbartifacts = 
getHistoryArtifacts(historyFile, tempHistoryPath); + Collection bbartifacts = getHistoryArtifacts(historyFile, tempHistoryFile.toPath()); if (!bbartifacts.isEmpty()) { services.fireModuleDataEvent(new ModuleDataEvent( RecentActivityExtracterModuleFactory.getModuleName(), @@ -154,11 +155,12 @@ final class ExtractSafari extends Extract { * * @param origFile AbstractFile of the history file from the case * @param tempFilePath Path to temporary copy of the history db - * @return Blackboard Artifacts for the history db + * @return Blackboard Artifacts for the history db or null if there are + * no history artifacts * @throws TskCoreException */ private Collection getHistoryArtifacts(AbstractFile origFile, Path tempFilePath) throws TskCoreException { - List> historyList = this.dbConnect(tempFilePath.toString(), SAFARI_HISTORY_QUERY); + List> historyList = this.dbConnect(tempFilePath.toString(), HISTORY_QUERY); if (historyList == null || historyList.isEmpty()) { return null; @@ -166,9 +168,9 @@ final class ExtractSafari extends Extract { Collection bbartifacts = new ArrayList<>(); for (HashMap row : historyList) { - String url = row.get(SAFARI_HEAD_URL).toString(); - String title = row.get(SAFARI_HEAD_TITLE).toString(); - Long time = (Double.valueOf(row.get(SAFARI_HEAD_TIME).toString())).longValue(); + String url = row.get(HEAD_URL).toString(); + String title = row.get(HEAD_TITLE).toString(); + Long time = (Double.valueOf(row.get(HEAD_TIME).toString())).longValue(); BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY); bbart.addAttributes(createHistoryAttribute(url, time, null, title, From b886e0b7cceff7bb5973b9584ec06852f9ec4ea4 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 25 Feb 2019 12:29:33 -0500 Subject: [PATCH 28/80] 4757 make single case queries consistant with current approach --- .../InterCaseSearchResultsProcessor.java | 112 +++++++++++------- 1 file changed, 70 insertions(+), 42 deletions(-) diff --git 
a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 794d57ffec..3dafb6e06e 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -84,15 +84,7 @@ final class InterCaseSearchResultsProcessor { } private String getSingleInterCaseWhereClause() { - String tableName = EamDbUtil.correlationTypeToInstanceTableName(correlationType); - StringBuilder sqlString = new StringBuilder(250); - sqlString.append("value IN (SELECT value FROM ") - .append(tableName) - .append(" WHERE value IN (SELECT value FROM ") - .append(tableName) - .append(" WHERE case_id=%s AND (known_status !=%s OR known_status IS NULL) GROUP BY value)") - .append(" AND (case_id=%s OR case_id=%s) GROUP BY value HAVING COUNT(DISTINCT case_id) > 1) ORDER BY value"); - return sqlString.toString(); + return "case_id=%s AND (known_status !=%s OR known_status IS NULL)"; } /** @@ -130,11 +122,11 @@ final class InterCaseSearchResultsProcessor { */ Map> findInterCaseValuesByCase(Case currentCase) { try { - InterCaseByCaseCallback instancetableCallback = new InterCaseByCaseCallback(); + EamDb dbManager = EamDb.getInstance(); int caseId = dbManager.getCase(currentCase).getID(); - + InterCaseByCaseCallback instancetableCallback = new InterCaseByCaseCallback(caseId); dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); @@ -155,11 +147,11 @@ final class InterCaseSearchResultsProcessor { */ Map findInterCaseValuesByCount(Case currentCase) { try { - InterCaseByCountCallback instancetableCallback = new InterCaseByCountCallback(); + EamDb dbManager = EamDb.getInstance(); int caseId = dbManager.getCase(currentCase).getID(); - + 
InterCaseByCountCallback instancetableCallback = new InterCaseByCountCallback(caseId); dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); @@ -182,12 +174,12 @@ final class InterCaseSearchResultsProcessor { */ Map findSingleInterCaseValuesByCount(Case currentCase, CorrelationCase singleCase) { try { - InterCaseByCountCallback instancetableCallback = new InterCaseByCountCallback(); EamDb dbManager = EamDb.getInstance(); int caseId = dbManager.getCase(currentCase).getID(); int targetCaseId = singleCase.getID(); + InterCaseByCountCallback instancetableCallback = new InterCaseByCountCallback(caseId, targetCaseId); dbManager.processInstanceTableWhere(correlationType, String.format(singleInterCaseWhereClause, caseId, - TskData.FileKnown.KNOWN.getFileKnownValue(), caseId, targetCaseId), instancetableCallback); + TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); return instancetableCallback.getInstanceCollatedCommonFiles(); } catch (EamDbException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); @@ -210,12 +202,14 @@ final class InterCaseSearchResultsProcessor { */ Map> findSingleInterCaseValuesByCase(Case currentCase, CorrelationCase singleCase) { try { - InterCaseByCaseCallback instancetableCallback = new InterCaseByCaseCallback(); + EamDb dbManager = EamDb.getInstance(); int caseId = dbManager.getCase(currentCase).getID(); int targetCaseId = singleCase.getID(); + InterCaseByCaseCallback instancetableCallback = new InterCaseByCaseCallback(caseId, targetCaseId); dbManager.processInstanceTableWhere(correlationType, String.format(singleInterCaseWhereClause, caseId, - TskData.FileKnown.KNOWN.getFileKnownValue(), caseId, targetCaseId), instancetableCallback); + TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); + return instancetableCallback.getInstanceCollatedCommonFiles(); } catch 
(EamDbException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); @@ -230,16 +224,23 @@ final class InterCaseSearchResultsProcessor { private class InterCaseByCountCallback implements InstanceTableCallback { final Map instanceCollatedCommonFiles = new HashMap<>(); + private final int caseID; + private final int targetCase; + + private InterCaseByCountCallback(int caseId) { + this(caseId, 0); + } + + private InterCaseByCountCallback(int caseId, int targetCase) { + this.caseID = caseId; + this.targetCase = targetCase; + } @Override public void process(ResultSet resultSet) { try { Set values = new HashSet<>(); - Integer caseID = null; while (resultSet.next()) { - if (caseID == null) { - caseID = InstanceTableCallback.getCaseId(resultSet); - } String corValue = InstanceTableCallback.getValue(resultSet); if (corValue == null || HashUtility.isNoDataMd5(corValue)) { continue; @@ -256,7 +257,9 @@ final class InterCaseSearchResultsProcessor { CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.COUNT_NODE); searchResult.setCurrentAttributeInst(instance); commonAttributeValue.addInstance(searchResult); - anotherCase = anotherCase || instance.getCorrelationCase().getID() != caseID; + if (!anotherCase && ((targetCase == 0 && instance.getCorrelationCase().getID() != caseID) || (targetCase == instance.getCorrelationCase().getID()))) { + anotherCase = true; + } } if (anotherCase) { if (instanceCollatedCommonFiles.containsKey(size)) { @@ -290,6 +293,17 @@ final class InterCaseSearchResultsProcessor { private class InterCaseByCaseCallback implements InstanceTableCallback { final Map> caseCollatedDataSourceCollections = new HashMap<>(); + private final int caseID; + private final int targetCase; + + private InterCaseByCaseCallback(int caseId) { + this(caseId, 0); + } + + private InterCaseByCaseCallback(int caseId, int targetCase) { + this.caseID = caseId; + 
this.targetCase = targetCase; + } @Override public void process(ResultSet resultSet) { @@ -305,30 +319,44 @@ final class InterCaseSearchResultsProcessor { for (String corValue : values) { List instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); if (instances.size() > 1) { - for (CorrelationAttributeInstance instance : instances) { - CorrelationCase correlationCase = instance.getCorrelationCase(); - String caseName = correlationCase.getDisplayName(); - CorrelationDataSource correlationDatasource = instance.getCorrelationDataSource(); - //label datasource with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class - String dataSourceNameKey = correlationDatasource.getName() + " (Id: " + correlationDatasource.getDataSourceObjectID() + ")"; - if (!caseCollatedDataSourceCollections.containsKey(caseName)) { - caseCollatedDataSourceCollections.put(caseName, new HashMap()); + boolean addToResults = targetCase == 0; + if (!addToResults) { + for (CorrelationAttributeInstance instance : instances) { + if (instance.getCorrelationCase().getID() == targetCase) { + System.out.println("Target case found in results"); + addToResults = true; + break; + } } - Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); - if (!dataSourceToFile.containsKey(dataSourceNameKey)) { - dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); + } + else { + System.out.println("Target case is not set adding all results"); + } + if (addToResults) { + for (CorrelationAttributeInstance instance : instances) { + CorrelationCase correlationCase = instance.getCorrelationCase(); + String caseName = correlationCase.getDisplayName(); + CorrelationDataSource correlationDatasource = instance.getCorrelationDataSource(); + //label datasource with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class + String dataSourceNameKey = correlationDatasource.getName() + " 
(Id: " + correlationDatasource.getDataSourceObjectID() + ")"; + if (!caseCollatedDataSourceCollections.containsKey(caseName)) { + caseCollatedDataSourceCollections.put(caseName, new HashMap<>()); + } + Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); + if (!dataSourceToFile.containsKey(dataSourceNameKey)) { + dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); + } + CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); + CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); + searchResult.setCurrentAttributeInst(instance); + CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); + commonAttributeValue.addInstance(searchResult); + valueList.addMetadataToList(commonAttributeValue); + dataSourceToFile.put(dataSourceNameKey, valueList); + caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); } - CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); - CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); - searchResult.setCurrentAttributeInst(instance); - CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); - commonAttributeValue.addInstance(searchResult); - valueList.addMetadataToList(commonAttributeValue); - dataSourceToFile.put(dataSourceNameKey, valueList); - caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); } } - } } catch (EamDbException | SQLException ex) { LOGGER.log(Level.WARNING, "Error getting artifact instances from database.", ex); // NON-NLS From 9d2c87f03d077cf4fca1825b944fc42471bbd38c Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 25 Feb 2019 13:22:24 -0500 Subject: [PATCH 29/80] Plist jar moved to RecentActivities, Safari support for bookmarks and merged in changes from history branch --- 
RecentActivity/ivy.xml | 3 + RecentActivity/nbproject/project.properties | 1 + RecentActivity/nbproject/project.xml | 4 + .../autopsy/recentactivity/ExtractSafari.java | 190 +++++++++++++++++- 4 files changed, 188 insertions(+), 10 deletions(-) diff --git a/RecentActivity/ivy.xml b/RecentActivity/ivy.xml index 290c8371ea..ca95f14a98 100644 --- a/RecentActivity/ivy.xml +++ b/RecentActivity/ivy.xml @@ -6,4 +6,7 @@ + + + diff --git a/RecentActivity/nbproject/project.properties b/RecentActivity/nbproject/project.properties index 9736070e53..b5f9e4cc71 100644 --- a/RecentActivity/nbproject/project.properties +++ b/RecentActivity/nbproject/project.properties @@ -1,3 +1,4 @@ +file.reference.dd-plist-1.20.jar=C:\\Users\\kelly\\Workspace\\autopsy\\RecentActivity\\release\\modules\\ext\\dd-plist-1.20.jar javac.source=1.8 javac.compilerargs=-Xlint -Xlint:-serial license.file=../LICENSE-2.0.txt diff --git a/RecentActivity/nbproject/project.xml b/RecentActivity/nbproject/project.xml index 87619a8356..f397b6b23b 100644 --- a/RecentActivity/nbproject/project.xml +++ b/RecentActivity/nbproject/project.xml @@ -74,6 +74,10 @@ + + ext/dd-plist-1.20.jar + C:\Users\kelly\Workspace\autopsy\RecentActivity\release\modules\ext\dd-plist-1.20.jar + diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 8bd52a1f2d..cc172d56e8 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -18,15 +18,22 @@ */ package org.sleuthkit.autopsy.recentactivity; +import com.dd.plist.NSArray; +import com.dd.plist.NSDictionary; +import com.dd.plist.NSObject; +import com.dd.plist.NSString; +import com.dd.plist.PropertyListFormatException; +import com.dd.plist.PropertyListParser; import java.io.File; import java.io.IOException; import java.nio.file.Path; -import java.nio.file.Paths; 
+import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.logging.Level; +import javax.xml.parsers.ParserConfigurationException; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; @@ -39,6 +46,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; +import org.xml.sax.SAXException; /** * Extract the bookmarks, cookies, downloads and history from Safari @@ -52,22 +60,28 @@ final class ExtractSafari extends Extract { private static final String HISTORY_QUERY = "SELECT url, title, visit_time + 978307200 as time FROM 'history_items' JOIN history_visits ON history_item = history_items.id;"; //NON-NLS private static final String HISTORY_FILE_NAME = "History.db"; //NON-NLS + private static final String BOOKMARK_FILE_NAME = "Bookmarks.plist"; //NON-NLS private static final String HEAD_URL = "url"; //NON-NLS private static final String HEAD_TITLE = "title"; //NON-NLS private static final String HEAD_TIME = "time"; //NON-NLS + private static final String PLIST_KEY_CHILDREN = "Children"; //NON-NLS + private static final String PLIST_KEY_URL = "URLString"; //NON-NLS + private static final String PLIST_KEY_URI = "URIDictionary"; //NON-NLS + private static final String PLIST_KEY_TITLE = "title"; //NON-NLS + private final Logger logger = Logger.getLogger(this.getClass().getName()); @Messages({ "ExtractSafari_Module_Name=Safari", - "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files." 
- }) + "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.", + "ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files",}) /** - * Extract the bookmarks, cookies, downloads and history from Safari - * - */ + * Extract the bookmarks, cookies, downloads and history from Safari + * + */ ExtractSafari() { } @@ -83,9 +97,17 @@ final class ExtractSafari extends Extract { try { processHistoryDB(dataSource, context); + } catch (IOException | TskCoreException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Getting_History()); - logger.log(Level.SEVERE, "Exception thrown while processing history file: " + ex); //NON-NLS + logger.log(Level.SEVERE, "Exception thrown while processing history file: {0}", ex); //NON-NLS + } + + try { + processBookmarkPList(dataSource, context); + } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | ParserConfigurationException ex) { + this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); + logger.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS } } @@ -116,6 +138,37 @@ final class ExtractSafari extends Extract { } } + /** + * + * @param dataSource + * @param context + * @throws TskCoreException + * @throws IOException + * @throws SAXException + * @throws PropertyListFormatException + * @throws ParseException + * @throws ParserConfigurationException + */ + private void processBookmarkPList(Content dataSource, IngestJobContext context) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + FileManager fileManager = getCurrentCase().getServices().getFileManager(); + + List files = fileManager.findFiles(dataSource, BOOKMARK_FILE_NAME); + + if (files == null || files.isEmpty()) { + return; + } + + this.setFoundData(true); + + for (AbstractFile file : files) { + if 
(context.dataSourceIngestIsCancelled()) { + break; + } + + getBookmarks(context, file); + } + } + /** * Creates a temporary copy of historyFile and creates a list of * BlackboardArtifacts for the history information in the file. @@ -129,7 +182,7 @@ final class ExtractSafari extends Extract { return; } - File tempHistoryFile = this.createTemporaryFile(context, historyFile); + File tempHistoryFile = createTemporaryFile(context, historyFile); try { ContentUtils.writeToFile(historyFile, tempHistoryFile, context::dataSourceIngestIsCancelled); @@ -149,14 +202,47 @@ final class ExtractSafari extends Extract { } } + /** + * Creates a temporary bookmark file from the AbstractFile and creates + * BlackboardArtifacts for the any bookmarks found. + * + * @param context IngestJobContext object + * @param file AbstractFile from case + * @throws TskCoreException + * @throws IOException + * @throws SAXException + * @throws PropertyListFormatException + * @throws ParseException + * @throws ParserConfigurationException + */ + private void getBookmarks(IngestJobContext context, AbstractFile file) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + if (file.getSize() == 0) { + return; + } + + File tempFile = createTemporaryFile(context, file); + + try { + Collection bbartifacts = getBookmarkArtifacts(file, tempFile); + if (!bbartifacts.isEmpty()) { + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bbartifacts)); + } + } finally { + tempFile.delete(); + } + + } + /** * Queries the history db for the history information creating a list of * BlackBoardArtifact for each row returned from the db. 
* * @param origFile AbstractFile of the history file from the case * @param tempFilePath Path to temporary copy of the history db - * @return Blackboard Artifacts for the history db or null if there are - * no history artifacts + * @return Blackboard Artifacts for the history db or null if there are no + * history artifacts * @throws TskCoreException */ private Collection getHistoryArtifacts(AbstractFile origFile, Path tempFilePath) throws TskCoreException { @@ -180,4 +266,88 @@ final class ExtractSafari extends Extract { return bbartifacts; } + + /** + * Parses the temporary version of bookmarks.plist and creates + * + * @param origFile The origFile Bookmark.plist file from the case + * @param tempFile The temporary local version of Bookmark.plist + * @return Collection of BlackboardArtifacts for the bookmarks in origFile + * @throws IOException + * @throws PropertyListFormatException + * @throws ParseException + * @throws ParserConfigurationException + * @throws SAXException + * @throws TskCoreException + */ + private Collection getBookmarkArtifacts(AbstractFile origFile, File tempFile) throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException, TskCoreException { + Collection bbartifacts = new ArrayList<>(); + + try { + NSDictionary root = (NSDictionary) PropertyListParser.parse(tempFile); + + parseBookmarkDictionary(bbartifacts, origFile, root); + } catch (PropertyListFormatException ex) { + PropertyListFormatException plfe = new PropertyListFormatException(origFile.getName() + ": " + ex.getMessage()); + plfe.setStackTrace(ex.getStackTrace()); + throw plfe; + } catch (ParseException ex) { + ParseException pe = new ParseException(origFile.getName() + ": " + ex.getMessage(), ex.getErrorOffset()); + pe.setStackTrace(ex.getStackTrace()); + throw pe; + } catch (ParserConfigurationException ex) { + ParserConfigurationException pce = new ParserConfigurationException(origFile.getName() + ": " + ex.getMessage()); + 
pce.setStackTrace(ex.getStackTrace()); + throw pce; + } catch (SAXException ex) { + SAXException se = new SAXException(origFile.getName() + ": " + ex.getMessage()); + se.setStackTrace(ex.getStackTrace()); + throw se; + } + + return bbartifacts; + } + + /** + * Parses the plist object to find the bookmark child objects, then creates + * an artifact with the bookmark information + * + * @param bbartifacts BlackboardArtifact list to add new the artifacts to + * @param origFile The origFile Bookmark.plist file from the case + * @param root NSDictionary object to parse + * @throws TskCoreException + */ + private void parseBookmarkDictionary(Collection bbartifacts, AbstractFile origFile, NSDictionary root) throws TskCoreException { + if (root.containsKey(PLIST_KEY_CHILDREN)) { + NSArray children = (NSArray) root.objectForKey(PLIST_KEY_CHILDREN); + + if (children != null) { + for (NSObject obj : children.getArray()) { + parseBookmarkDictionary(bbartifacts, origFile, (NSDictionary) obj); + } + } + } else if (root.containsKey(PLIST_KEY_URL)) { + String url = null; + String title = null; + + NSString nsstr = (NSString) root.objectForKey(PLIST_KEY_URL); + if (nsstr != null) { + url = nsstr.toString(); + } + + NSDictionary dic = (NSDictionary) root.get(PLIST_KEY_URI); + + nsstr = (NSString) root.objectForKey(PLIST_KEY_TITLE); + + if (nsstr != null) { + title = ((NSString) dic.get(PLIST_KEY_TITLE)).toString(); + } + + if (url != null || title != null) { + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK); + bbart.addAttributes(createBookmarkAttributes(url, title, null, this.getName(), NetworkUtils.extractDomain(url))); + bbartifacts.add(bbart); + } + } + } } From 439c43204939336199f4ea2882bbd212e166327a Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 25 Feb 2019 13:49:07 -0500 Subject: [PATCH 30/80] 4757 fix bug with target case results showing more cases --- .../datamodel/AbstractSqlEamDb.java | 138 +++++++++++++++++- 
.../centralrepository/datamodel/EamDb.java | 10 ++ .../InterCaseSearchResultsProcessor.java | 82 ++++++----- 3 files changed, 188 insertions(+), 42 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index bfcbcb6cb5..2987ada263 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -40,6 +40,7 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; +import org.openide.util.Exceptions; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import static org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil.updateSchemaVersion; @@ -625,7 +626,7 @@ abstract class AbstractSqlEamDb implements EamDb { // This data source is already in the central repo return eamDataSource; } - + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -650,7 +651,7 @@ abstract class AbstractSqlEamDb implements EamDb { /* * If nothing was inserted, then return the data source that * exists in the Central Repository. - * + * * This is expected to occur with PostgreSQL Central Repository * databases. */ @@ -675,7 +676,7 @@ abstract class AbstractSqlEamDb implements EamDb { * If an exception was thrown causing us to not return a new data * source, attempt to get an existing data source with the same case * ID and data source object ID. - * + * * This exception block is expected to occur with SQLite Central * Repository databases. */ @@ -1052,6 +1053,74 @@ abstract class AbstractSqlEamDb implements EamDb { } } + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValue of the given eamArtifact. 
+ * + * @param aType The type of the artifact + * @param value The correlation value + * + * @return List of artifact instances for a given type/value + * + * @throws EamDbException + */ + public List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException { + + String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); + String sql + = "SELECT " + + tableName + + ".id," + + tableName + + ".value," + + tableName + + ".file_obj_id," + + " cases.case_name, cases.case_uid, data_sources.id AS data_source_id, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id, data_sources.datasource_obj_id, data_sources.md5, data_sources.sha1, data_sources.sha256 FROM " + + tableName + + " LEFT JOIN cases ON " + + tableName + + ".case_id=cases.id" + + " LEFT JOIN data_sources ON " + + tableName + + ".data_source_id=data_sources.id" + + " WHERE value IN ("; + StringBuilder inValuesBuilder = new StringBuilder(sql); + //WJS-TODO use non-stream solution to making statement for proper error handling + for (String value : values) { + if (value != null) { + inValuesBuilder.append("'"); + inValuesBuilder.append(value); + inValuesBuilder.append("',"); + } + } + inValuesBuilder.deleteCharAt(inValuesBuilder.length() - 1); //delete last comma + inValuesBuilder.append(")"); + Connection conn = connect(); + + List artifactInstances = new ArrayList<>(); + + CorrelationAttributeInstance artifactInstance; + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + + try { + preparedStatement = conn.prepareStatement(sql); + resultSet = preparedStatement.executeQuery(); + while (resultSet.next()) { + artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType); + artifactInstances.add(artifactInstance); + } + } catch (SQLException ex) { + throw new EamDbException("Error getting artifact instances by artifactType and artifactValue.", 
ex); // NON-NLS + } finally { + EamDbUtil.closeStatement(preparedStatement); + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeConnection(conn); + } + + return artifactInstances; + } + /** * Retrieves eamArtifact instances from the database that are associated * with the eamArtifactType and eamArtifactValue of the given eamArtifact. @@ -1114,6 +1183,69 @@ abstract class AbstractSqlEamDb implements EamDb { return artifactInstances; } + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValue of the given eamArtifact. + * + * @param aType The type of the artifact + * @param value The correlation value + * + * @return List of artifact instances for a given type/value + * + * @throws EamDbException + */ + @Override + public List getArtifactInstancesByTypeValueAndCase(CorrelationAttributeInstance.Type aType, String value, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { + String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); + String sql + = "SELECT " + + tableName + + ".id," + + tableName + + ".value," + + tableName + + ".file_obj_id," + + " cases.case_name, cases.case_uid, data_sources.id AS data_source_id, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id, data_sources.datasource_obj_id, data_sources.md5, data_sources.sha1, data_sources.sha256 FROM " + + tableName + + " LEFT JOIN cases ON " + + tableName + + ".case_id=cases.id" + + " LEFT JOIN data_sources ON " + + tableName + + ".data_source_id=data_sources.id" + + " WHERE value=? 
and " + + tableName + +".case_id in ('"; + StringBuilder inValuesBuilder = new StringBuilder(sql); + inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '"))); + inValuesBuilder.append("')"); + String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); + Connection conn = connect(); + List artifactInstances = new ArrayList<>(); + + CorrelationAttributeInstance artifactInstance; + PreparedStatement preparedStatement = null; + ResultSet resultSet = null; + + try { + preparedStatement = conn.prepareStatement(inValuesBuilder.toString()); + preparedStatement.setString(1, normalizedValue); + resultSet = preparedStatement.executeQuery(); + while (resultSet.next()) { + artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType); + artifactInstances.add(artifactInstance); + } + } catch (SQLException ex) { + throw new EamDbException("Error getting artifact instances by artifactType and artifactValue.", ex); // NON-NLS + } finally { + EamDbUtil.closeStatement(preparedStatement); + EamDbUtil.closeResultSet(resultSet); + EamDbUtil.closeConnection(conn); + } + + return artifactInstances; + } + /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java index a5ef89caa0..110a24e5a1 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java @@ -284,6 +284,16 @@ public interface EamDb { */ List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException; + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValue of the given eamArtifact. 
+ * + * @param aType EamArtifact.Type to search for + * @param value Value to search for + * + * @return List of artifact instances for a given type/value + */ + List getArtifactInstancesByTypeValueAndCase(CorrelationAttributeInstance.Type aType, String value, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException; /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 3dafb6e06e..e4146a15af 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.commonpropertiessearch; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -240,6 +241,11 @@ final class InterCaseSearchResultsProcessor { public void process(ResultSet resultSet) { try { Set values = new HashSet<>(); + List targetCases = new ArrayList<>(); + if (targetCase != 0) { + targetCases.add(caseID); + targetCases.add(targetCase); + } while (resultSet.next()) { String corValue = InstanceTableCallback.getValue(resultSet); if (corValue == null || HashUtility.isNoDataMd5(corValue)) { @@ -248,7 +254,12 @@ final class InterCaseSearchResultsProcessor { values.add(corValue); } for (String corValue : values) { - List instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + List instances; + if (targetCases.isEmpty()) { + instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + } else { + instances = 
EamDb.getInstance().getArtifactInstancesByTypeValueAndCase(correlationType, corValue, targetCases); + } int size = instances.size(); if (size > 1) { CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); @@ -257,9 +268,7 @@ final class InterCaseSearchResultsProcessor { CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.COUNT_NODE); searchResult.setCurrentAttributeInst(instance); commonAttributeValue.addInstance(searchResult); - if (!anotherCase && ((targetCase == 0 && instance.getCorrelationCase().getID() != caseID) || (targetCase == instance.getCorrelationCase().getID()))) { - anotherCase = true; - } + anotherCase = anotherCase || instance.getCorrelationCase().getID() != caseID; } if (anotherCase) { if (instanceCollatedCommonFiles.containsKey(size)) { @@ -308,6 +317,11 @@ final class InterCaseSearchResultsProcessor { @Override public void process(ResultSet resultSet) { try { + List targetCases = new ArrayList<>(); + if (targetCase != 0) { + targetCases.add(caseID); + targetCases.add(targetCase); + } Set values = new HashSet<>(); while (resultSet.next()) { String corValue = InstanceTableCallback.getValue(resultSet); @@ -317,44 +331,34 @@ final class InterCaseSearchResultsProcessor { values.add(corValue); } for (String corValue : values) { - List instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + List instances; + if (targetCases.isEmpty()) { + instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + } else { + instances = EamDb.getInstance().getArtifactInstancesByTypeValueAndCase(correlationType, corValue, targetCases); + } if (instances.size() > 1) { - boolean addToResults = targetCase == 0; - if (!addToResults) { - for (CorrelationAttributeInstance instance : instances) { - if (instance.getCorrelationCase().getID() == targetCase) { - System.out.println("Target case found in 
results"); - addToResults = true; - break; - } + for (CorrelationAttributeInstance instance : instances) { + CorrelationCase correlationCase = instance.getCorrelationCase(); + String caseName = correlationCase.getDisplayName(); + CorrelationDataSource correlationDatasource = instance.getCorrelationDataSource(); + //label datasource with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class + String dataSourceNameKey = correlationDatasource.getName() + " (Id: " + correlationDatasource.getDataSourceObjectID() + ")"; + if (!caseCollatedDataSourceCollections.containsKey(caseName)) { + caseCollatedDataSourceCollections.put(caseName, new HashMap<>()); } - } - else { - System.out.println("Target case is not set adding all results"); - } - if (addToResults) { - for (CorrelationAttributeInstance instance : instances) { - CorrelationCase correlationCase = instance.getCorrelationCase(); - String caseName = correlationCase.getDisplayName(); - CorrelationDataSource correlationDatasource = instance.getCorrelationDataSource(); - //label datasource with it's id for uniqueness done in same manner as ImageGallery does in the DataSourceCell class - String dataSourceNameKey = correlationDatasource.getName() + " (Id: " + correlationDatasource.getDataSourceObjectID() + ")"; - if (!caseCollatedDataSourceCollections.containsKey(caseName)) { - caseCollatedDataSourceCollections.put(caseName, new HashMap<>()); - } - Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); - if (!dataSourceToFile.containsKey(dataSourceNameKey)) { - dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); - } - CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); - CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); - searchResult.setCurrentAttributeInst(instance); - CommonAttributeValue commonAttributeValue = new 
CommonAttributeValue(corValue); - commonAttributeValue.addInstance(searchResult); - valueList.addMetadataToList(commonAttributeValue); - dataSourceToFile.put(dataSourceNameKey, valueList); - caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); + Map dataSourceToFile = caseCollatedDataSourceCollections.get(caseName); + if (!dataSourceToFile.containsKey(dataSourceNameKey)) { + dataSourceToFile.put(dataSourceNameKey, new CommonAttributeValueList()); } + CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); + CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); + searchResult.setCurrentAttributeInst(instance); + CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); + commonAttributeValue.addInstance(searchResult); + valueList.addMetadataToList(commonAttributeValue); + dataSourceToFile.put(dataSourceNameKey, valueList); + caseCollatedDataSourceCollections.put(caseName, dataSourceToFile); } } } From aac9ced6123e7a42e61a9fb2ab5e2a6a644bb1d2 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 25 Feb 2019 16:37:24 -0500 Subject: [PATCH 31/80] 4757 combine queries made for by case results --- .../datamodel/AbstractSqlEamDb.java | 144 ++++-------------- .../centralrepository/datamodel/EamDb.java | 15 +- .../datamodel/SqliteEamDb.java | 40 ++++- .../InterCaseSearchResultsProcessor.java | 16 +- 4 files changed, 93 insertions(+), 122 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 2987ada263..26af02e7e2 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -34,6 +34,7 @@ import java.sql.SQLException; import java.sql.Statement; import 
java.sql.Types; import java.time.LocalDate; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -1064,8 +1065,28 @@ abstract class AbstractSqlEamDb implements EamDb { * * @throws EamDbException */ + @Override + public List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { + return getArtifactInstancesByTypeValues(aType, Arrays.asList(value)); + } + + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValue of the given eamArtifact. + * + * @param aType The type of the artifact + * @param value The correlation value + * + * @return List of artifact instances for a given type/value + * + * @throws EamDbException + */ + @Override public List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException { + return getArtifactInstances(prepareGetInstancesSql(aType, values), aType); + } + private String prepareGetInstancesSql(CorrelationAttributeInstance.Type aType, List values) throws CorrelationAttributeNormalizationException { String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); String sql = "SELECT " @@ -1085,24 +1106,24 @@ abstract class AbstractSqlEamDb implements EamDb { + ".data_source_id=data_sources.id" + " WHERE value IN ("; StringBuilder inValuesBuilder = new StringBuilder(sql); - //WJS-TODO use non-stream solution to making statement for proper error handling for (String value : values) { if (value != null) { inValuesBuilder.append("'"); - inValuesBuilder.append(value); + inValuesBuilder.append(CorrelationAttributeNormalizer.normalize(aType, value)); inValuesBuilder.append("',"); } } inValuesBuilder.deleteCharAt(inValuesBuilder.length() - 1); //delete last comma inValuesBuilder.append(")"); + return inValuesBuilder.toString(); + } 
+ + private List getArtifactInstances(String sql, CorrelationAttributeInstance.Type aType) throws CorrelationAttributeNormalizationException, EamDbException { Connection conn = connect(); - List artifactInstances = new ArrayList<>(); - CorrelationAttributeInstance artifactInstance; PreparedStatement preparedStatement = null; ResultSet resultSet = null; - try { preparedStatement = conn.prepareStatement(sql); resultSet = preparedStatement.executeQuery(); @@ -1117,7 +1138,6 @@ abstract class AbstractSqlEamDb implements EamDb { EamDbUtil.closeResultSet(resultSet); EamDbUtil.closeConnection(conn); } - return artifactInstances; } @@ -1133,117 +1153,17 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ @Override - public List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { - - String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - - Connection conn = connect(); - - List artifactInstances = new ArrayList<>(); - - CorrelationAttributeInstance artifactInstance; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; - + public List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); String sql - = "SELECT " + = " and " + tableName - + ".id," - + tableName - + ".value," - + tableName - + ".file_obj_id," - + " cases.case_name, cases.case_uid, data_sources.id AS data_source_id, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id, data_sources.datasource_obj_id, data_sources.md5, data_sources.sha1, data_sources.sha256 FROM " - + tableName - + " LEFT JOIN cases ON " - + tableName - + ".case_id=cases.id" - + " LEFT JOIN data_sources ON " - + tableName - + 
".data_source_id=data_sources.id" - + " WHERE value=?"; - - try { - preparedStatement = conn.prepareStatement(sql); - preparedStatement.setString(1, normalizedValue); - resultSet = preparedStatement.executeQuery(); - while (resultSet.next()) { - artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType); - artifactInstances.add(artifactInstance); - } - } catch (SQLException ex) { - throw new EamDbException("Error getting artifact instances by artifactType and artifactValue.", ex); // NON-NLS - } finally { - EamDbUtil.closeStatement(preparedStatement); - EamDbUtil.closeResultSet(resultSet); - EamDbUtil.closeConnection(conn); - } - - return artifactInstances; - } - - /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. - * - * @param aType The type of the artifact - * @param value The correlation value - * - * @return List of artifact instances for a given type/value - * - * @throws EamDbException - */ - @Override - public List getArtifactInstancesByTypeValueAndCase(CorrelationAttributeInstance.Type aType, String value, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { - String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); - String sql - = "SELECT " - + tableName - + ".id," - + tableName - + ".value," - + tableName - + ".file_obj_id," - + " cases.case_name, cases.case_uid, data_sources.id AS data_source_id, data_sources.name, device_id, file_path, known_status, comment, data_sources.case_id, data_sources.datasource_obj_id, data_sources.md5, data_sources.sha1, data_sources.sha256 FROM " - + tableName - + " LEFT JOIN cases ON " - + tableName - + ".case_id=cases.id" - + " LEFT JOIN data_sources ON " - + tableName - + ".data_source_id=data_sources.id" - + " WHERE value=? 
and " - + tableName - +".case_id in ('"; - StringBuilder inValuesBuilder = new StringBuilder(sql); + + ".case_id in ('"; + StringBuilder inValuesBuilder = new StringBuilder(prepareGetInstancesSql(aType, values)); + inValuesBuilder.append(sql); inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '"))); inValuesBuilder.append("')"); - String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(); - List artifactInstances = new ArrayList<>(); - - CorrelationAttributeInstance artifactInstance; - PreparedStatement preparedStatement = null; - ResultSet resultSet = null; - - try { - preparedStatement = conn.prepareStatement(inValuesBuilder.toString()); - preparedStatement.setString(1, normalizedValue); - resultSet = preparedStatement.executeQuery(); - while (resultSet.next()) { - artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType); - artifactInstances.add(artifactInstance); - } - } catch (SQLException ex) { - throw new EamDbException("Error getting artifact instances by artifactType and artifactValue.", ex); // NON-NLS - } finally { - EamDbUtil.closeStatement(preparedStatement); - EamDbUtil.closeResultSet(resultSet); - EamDbUtil.closeConnection(conn); - } - - return artifactInstances; + return getArtifactInstances(inValuesBuilder.toString(), aType); } /** diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java index 110a24e5a1..ce5052d0a9 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java @@ -282,7 +282,7 @@ public interface EamDb { * * @return List of artifact instances for a given type/value */ - List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException; + List 
getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException; /** * Retrieves eamArtifact instances from the database that are associated @@ -293,7 +293,18 @@ public interface EamDb { * * @return List of artifact instances for a given type/value */ - List getArtifactInstancesByTypeValueAndCase(CorrelationAttributeInstance.Type aType, String value, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException; + List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException; + + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValue of the given eamArtifact. + * + * @param aType EamArtifact.Type to search for + * @param value Value to search for + * + * @return List of artifact instances for a given type/value + */ + List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException; /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index a9d3b8b46d..aa02a57839 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -512,7 +512,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + /** * Retrieves eamArtifact instances from the database that are associated * with the eamArtifactType and eamArtifactValue of the given eamArtifact. 
@@ -531,7 +531,45 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseSharedLock(); } } + + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValue of the given eamArtifact. + * + * @param aType The type of the artifact + * @param value The correlation value + * + * @return List of artifact instances for a given type/value + */ + @Override + public List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException { + try { + acquireSharedLock(); + return super.getArtifactInstancesByTypeValues(aType, values); + } finally { + releaseSharedLock(); + } + } + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValue of the given eamArtifact. + * + * @param aType The type of the artifact + * @param value The correlation value + * + * @return List of artifact instances for a given type/value + */ + @Override + public List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { + try { + acquireSharedLock(); + return super.getArtifactInstancesByTypeValuesAndCases(aType, values, caseIds); + } finally { + releaseSharedLock(); + } + } + /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index e4146a15af..2b4677cfe4 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -18,9 +18,11 @@ */ package 
org.sleuthkit.autopsy.commonpropertiessearch; +import com.google.common.collect.Iterables; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -37,7 +39,6 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; -import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.InstanceTableCallback; import org.sleuthkit.autopsy.commonpropertiessearch.AbstractCommonAttributeInstance.NODE_TYPE; import org.sleuthkit.autopsy.coreutils.Logger; @@ -256,9 +257,9 @@ final class InterCaseSearchResultsProcessor { for (String corValue : values) { List instances; if (targetCases.isEmpty()) { - instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + instances = EamDb.getInstance().getArtifactInstancesByTypeValues(correlationType, Arrays.asList(corValue)); } else { - instances = EamDb.getInstance().getArtifactInstancesByTypeValueAndCase(correlationType, corValue, targetCases); + instances = EamDb.getInstance().getArtifactInstancesByTypeValuesAndCases(correlationType, Arrays.asList(corValue), targetCases); } int size = instances.size(); if (size > 1) { @@ -301,6 +302,7 @@ final class InterCaseSearchResultsProcessor { */ private class InterCaseByCaseCallback implements InstanceTableCallback { + private static final int VALUE_BATCH_SIZE = 500; final Map> caseCollatedDataSourceCollections = new HashMap<>(); private final int caseID; private final int targetCase; @@ -330,12 +332,12 @@ final class InterCaseSearchResultsProcessor { } values.add(corValue); } - for (String corValue : values) { + for (List valuesChunk 
: Iterables.partition(values, VALUE_BATCH_SIZE)) { List instances; if (targetCases.isEmpty()) { - instances = EamDb.getInstance().getArtifactInstancesByTypeValue(correlationType, corValue); + instances = EamDb.getInstance().getArtifactInstancesByTypeValues(correlationType, valuesChunk); } else { - instances = EamDb.getInstance().getArtifactInstancesByTypeValueAndCase(correlationType, corValue, targetCases); + instances = EamDb.getInstance().getArtifactInstancesByTypeValuesAndCases(correlationType, valuesChunk, targetCases); } if (instances.size() > 1) { for (CorrelationAttributeInstance instance : instances) { @@ -354,7 +356,7 @@ final class InterCaseSearchResultsProcessor { CommonAttributeValueList valueList = dataSourceToFile.get(dataSourceNameKey); CentralRepoCommonAttributeInstance searchResult = new CentralRepoCommonAttributeInstance(instance.getID(), correlationType, NODE_TYPE.CASE_NODE); searchResult.setCurrentAttributeInst(instance); - CommonAttributeValue commonAttributeValue = new CommonAttributeValue(corValue); + CommonAttributeValue commonAttributeValue = new CommonAttributeValue(instance.getCorrelationValue()); commonAttributeValue.addInstance(searchResult); valueList.addMetadataToList(commonAttributeValue); dataSourceToFile.put(dataSourceNameKey, valueList); From 14d4c3da3757f2c3fbc898149b3893053a4ebab4 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 25 Feb 2019 17:08:07 -0500 Subject: [PATCH 32/80] Safari support for showing download information --- .../autopsy/recentactivity/ExtractSafari.java | 182 +++++++++++++++++- 1 file changed, 178 insertions(+), 4 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index cc172d56e8..4c448c1ff2 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -19,6 +19,7 @@ 
package org.sleuthkit.autopsy.recentactivity; import com.dd.plist.NSArray; +import com.dd.plist.NSDate; import com.dd.plist.NSDictionary; import com.dd.plist.NSObject; import com.dd.plist.NSString; @@ -61,6 +62,7 @@ final class ExtractSafari extends Extract { private static final String HISTORY_FILE_NAME = "History.db"; //NON-NLS private static final String BOOKMARK_FILE_NAME = "Bookmarks.plist"; //NON-NLS + private static final String DOWNLOAD_FILE_NAME = "Downloads.plist"; //NON-NLS private static final String HEAD_URL = "url"; //NON-NLS private static final String HEAD_TITLE = "title"; //NON-NLS @@ -70,6 +72,10 @@ final class ExtractSafari extends Extract { private static final String PLIST_KEY_URL = "URLString"; //NON-NLS private static final String PLIST_KEY_URI = "URIDictionary"; //NON-NLS private static final String PLIST_KEY_TITLE = "title"; //NON-NLS + private static final String PLIST_KEY_DOWNLOAD_URL = "DownloadEntryURL"; //NON-NLS + private static final String PLIST_KEY_DOWNLOAD_DATE = "DownloadEntryDateAddedKey"; //NON-NLS + private static final String PLIST_KEY_DOWNLOAD_PATH = "DownloadEntryPath"; //NON-NLS + private static final String PLIST_KEY_DOWNLOAD_HISTORY = "DownloadHistory"; //NON-NLS private final Logger logger = Logger.getLogger(this.getClass().getName()); @@ -109,6 +115,13 @@ final class ExtractSafari extends Extract { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); logger.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS } + + try { + processDownloadsPList(dataSource, context); + } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | ParserConfigurationException ex) { + this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); + logger.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS + } } /** @@ -127,7 +140,7 @@ final class ExtractSafari extends Extract { return; } - 
this.setFoundData(true); + setFoundData(true); for (AbstractFile historyFile : historyFiles) { if (context.dataSourceIngestIsCancelled()) { @@ -139,7 +152,7 @@ final class ExtractSafari extends Extract { } /** - * + * Finds all Bookmark.plist files and looks for bookmark entries * @param dataSource * @param context * @throws TskCoreException @@ -158,7 +171,7 @@ final class ExtractSafari extends Extract { return; } - this.setFoundData(true); + setFoundData(true); for (AbstractFile file : files) { if (context.dataSourceIngestIsCancelled()) { @@ -168,6 +181,38 @@ final class ExtractSafari extends Extract { getBookmarks(context, file); } } + + /** + * Process the safari download.plist file. + * + * @param dataSource + * @param context + * @throws TskCoreException + * @throws IOException + * @throws SAXException + * @throws PropertyListFormatException + * @throws ParseException + * @throws ParserConfigurationException + */ + private void processDownloadsPList(Content dataSource, IngestJobContext context) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + FileManager fileManager = getCurrentCase().getServices().getFileManager(); + + List files = fileManager.findFiles(dataSource, DOWNLOAD_FILE_NAME); + + if (files == null || files.isEmpty()) { + return; + } + + setFoundData(true); + + for (AbstractFile file : files) { + if (context.dataSourceIngestIsCancelled()) { + break; + } + + getDownloads(dataSource, context, file); + } + } /** * Creates a temporary copy of historyFile and creates a list of @@ -234,6 +279,39 @@ final class ExtractSafari extends Extract { } } + + /** + * Creates a temporary downloads file from the AbstractFile and creates + * BlackboardArtifacts for the any downloads found. 
+ * + * @param context IngestJobContext object + * @param file AbstractFile from case + * @throws TskCoreException + * @throws IOException + * @throws SAXException + * @throws PropertyListFormatException + * @throws ParseException + * @throws ParserConfigurationException + */ + private void getDownloads(Content dataSource, IngestJobContext context, AbstractFile file) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + if (file.getSize() == 0) { + return; + } + + File tempFile = createTemporaryFile(context, file); + + try { + Collection bbartifacts = getDownloadArtifacts(dataSource, file, tempFile); + if (!bbartifacts.isEmpty()) { + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, bbartifacts)); + } + } finally { + tempFile.delete(); + } + + } /** * Queries the history db for the history information creating a list of @@ -307,6 +385,66 @@ final class ExtractSafari extends Extract { return bbartifacts; } + + /** + * Finds the download entries in the tempFile and creates a list of artifacts from them. 
+ * + * @param origFile Download.plist file from case + * @param tempFile Temporary copy of download.plist file + * @return Collection of BlackboardArtifacts for the downloads in origFile + * @throws IOException + * @throws PropertyListFormatException + * @throws ParseException + * @throws ParserConfigurationException + * @throws SAXException + * @throws TskCoreException + */ + private Collection getDownloadArtifacts(Content dataSource, AbstractFile origFile, File tempFile)throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException, TskCoreException { + Collection bbartifacts = null; + + try { + while(true){ + NSDictionary root = (NSDictionary)PropertyListParser.parse(tempFile); + + if(root == null) + break; + + NSArray nsArray = (NSArray)root.get(PLIST_KEY_DOWNLOAD_HISTORY); + + if(nsArray == null) + break; + + NSObject[] objectArray = nsArray.getArray(); + bbartifacts = new ArrayList<>(); + + for(NSObject obj: objectArray){ + if(obj instanceof NSDictionary){ + bbartifacts.add(parseDownloadDictionary(dataSource, origFile, (NSDictionary)obj)); + } + } + break; + } + + } catch (PropertyListFormatException ex) { + PropertyListFormatException plfe = new PropertyListFormatException(origFile.getName() + ": " + ex.getMessage()); + plfe.setStackTrace(ex.getStackTrace()); + throw plfe; + } catch (ParseException ex) { + ParseException pe = new ParseException(origFile.getName() + ": " + ex.getMessage(), ex.getErrorOffset()); + pe.setStackTrace(ex.getStackTrace()); + throw pe; + } catch (ParserConfigurationException ex) { + ParserConfigurationException pce = new ParserConfigurationException(origFile.getName() + ": " + ex.getMessage()); + pce.setStackTrace(ex.getStackTrace()); + throw pce; + } catch (SAXException ex) { + SAXException se = new SAXException(origFile.getName() + ": " + ex.getMessage()); + se.setStackTrace(ex.getStackTrace()); + throw se; + } + + return bbartifacts; + } /** * Parses the plist object to find the 
bookmark child objects, then creates @@ -345,9 +483,45 @@ final class ExtractSafari extends Extract { if (url != null || title != null) { BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK); - bbart.addAttributes(createBookmarkAttributes(url, title, null, this.getName(), NetworkUtils.extractDomain(url))); + bbart.addAttributes(createBookmarkAttributes(url, title, null, getName(), NetworkUtils.extractDomain(url))); bbartifacts.add(bbart); } } } + + /** + * Parse the NSDictionary object that represents one download. + * + * @param origFile Download.plist file from the case + * @param entry One NSDictionary Object that represents one download instance + * @return a Blackboard Artifact for the download. + * @throws TskCoreException + */ + private BlackboardArtifact parseDownloadDictionary(Content dataSource, AbstractFile origFile, NSDictionary entry)throws TskCoreException { + String url = null; + String path = null; + Long time = null; + Long pathID = null; + + NSString nsstring = (NSString)entry.get(PLIST_KEY_DOWNLOAD_URL); + if(nsstring != null){ + url = nsstring.toString(); + } + + nsstring = (NSString)entry.get(PLIST_KEY_DOWNLOAD_PATH); + if(nsstring != null){ + path = nsstring.toString(); + pathID = Util.findID(dataSource, path); + } + + NSDate date = (NSDate)entry.get(PLIST_KEY_DOWNLOAD_DATE); + if(date != null){ + time = date.getDate().getTime(); + } + + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); + bbart.addAttributes(this.createDownloadAttributes(path, pathID, url, time, NetworkUtils.extractDomain(url), getName())); + + return bbart; + } } From e16063cf8eaa028a8008e6521bdb107ecebf41d0 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Tue, 26 Feb 2019 01:46:59 -0500 Subject: [PATCH 33/80] New HTML content viewer added. 
--- .../autopsy/contentviewers/Bundle.properties | 1 + .../autopsy/contentviewers/FileViewer.form | 1 + .../autopsy/contentviewers/FileViewer.java | 5 +- .../autopsy/contentviewers/HtmlPanel.form | 65 +++++++ .../autopsy/contentviewers/HtmlPanel.java | 166 ++++++++++++++++++ .../autopsy/contentviewers/HtmlViewer.form | 40 +++++ .../autopsy/contentviewers/HtmlViewer.java | 130 ++++++++++++++ .../contentviewers/MessageContentViewer.form | 39 +--- .../contentviewers/MessageContentViewer.java | 84 +++------ 9 files changed, 432 insertions(+), 99 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form create mode 100755 Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java create mode 100755 Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.form create mode 100755 Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties index e43fc72267..e6b621092f 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties @@ -83,3 +83,4 @@ MediaViewImagePanel.zoomResetButton.text=Reset MediaViewImagePanel.zoomTextField.text= MediaViewImagePanel.rotationTextField.text= MediaViewImagePanel.rotateLeftButton.toolTipText= +HtmlPanel.showImagesToggleButton.text=Show Images diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.form b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.form index d07831cafe..b3a7244a10 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.form +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.form @@ -11,6 +11,7 @@ + diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java index 84f216e253..ab92f3f543 100644 --- 
a/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/FileViewer.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -49,7 +49,8 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer private final FileTypeViewer[] KNOWN_VIEWERS = new FileTypeViewer[]{ new SQLiteViewer(), new PListViewer(), - new MediaFileViewer() + new MediaFileViewer(), + new HtmlViewer() }; private FileTypeViewer lastViewer; diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form new file mode 100755 index 0000000000..ec9f0fc569 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form @@ -0,0 +1,65 @@ + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java new file mode 100755 index 0000000000..058848d00e --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java @@ -0,0 +1,166 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2019 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.contentviewers; + +import org.jsoup.Jsoup; +import org.jsoup.nodes.Document; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.coreutils.Logger; + +/** + * A file content viewer for HTML files. + */ +@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives +final class HtmlPanel extends javax.swing.JPanel { + + private static final long serialVersionUID = 1L; + private static final Logger logger = Logger.getLogger(HtmlPanel.class.getName()); + + private String htmlText; + + /** + * Creates new form HtmlViewerPanel + */ + HtmlPanel() { + initComponents(); + + Utilities.configureTextPaneAsHtml(htmlbodyTextPane); + } + + /** + * Set the text pane's HTML text and refresh the view to display it. + * + * @param htmlText The HTML text to be applied to the text pane. + */ + void setHtmlText(String htmlText) { + this.htmlText = htmlText; + refresh(); + } + + /** + * Clear the HTML in the text pane and disable the show/hide button. 
+ */ + void reset() { + htmlbodyTextPane.setText(""); + showImagesToggleButton.setEnabled(false); + } + + /** + * Guarantee the HTML text has 'html' and 'body' tags. + * + * @param htmlText The HTML text + * + * @return The HTML text with the 'html' and 'body' tags applied. + */ + private String wrapInHtmlBody(String htmlText) { + return "" + htmlText + ""; + } + + /** + * Cleans out input HTML string + * + * @param htmlInString The HTML string to cleanse + * + * @return The cleansed HTML String + */ + private String cleanseHTML(String htmlInString) { + + Document doc = Jsoup.parse(htmlInString); + + // Update all 'img' tags. + doc.select("img[src]").forEach(img -> img.attr("src", "")); + + return doc.html(); + } + + /** + * Refresh the panel to reflect the current show/hide images setting. + */ + @Messages({ + "HtmlPanel_showImagesToggleButton_show=Show Images", + "HtmlPanel_showImagesToggleButton_hide=Hide Images" + }) + private void refresh() { + if (false == htmlText.isEmpty()) { + if (showImagesToggleButton.isSelected()) { + showImagesToggleButton.setText(Bundle.HtmlPanel_showImagesToggleButton_hide()); + this.htmlbodyTextPane.setText(wrapInHtmlBody(htmlText)); + } else { + showImagesToggleButton.setText(Bundle.HtmlPanel_showImagesToggleButton_show()); + this.htmlbodyTextPane.setText(wrapInHtmlBody(cleanseHTML(htmlText))); + } + htmlbodyTextPane.setCaretPosition(0); + showImagesToggleButton.setEnabled(true); + } + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. 
+ */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + htmlScrollPane = new javax.swing.JScrollPane(); + htmlbodyTextPane = new javax.swing.JTextPane(); + showImagesToggleButton = new javax.swing.JToggleButton(); + + htmlScrollPane.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS); + + htmlbodyTextPane.setEditable(false); + htmlScrollPane.setViewportView(htmlbodyTextPane); + + org.openide.awt.Mnemonics.setLocalizedText(showImagesToggleButton, org.openide.util.NbBundle.getMessage(HtmlPanel.class, "HtmlPanel.showImagesToggleButton.text")); // NOI18N + showImagesToggleButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + showImagesToggleButtonActionPerformed(evt); + } + }); + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() + .addGap(0, 203, Short.MAX_VALUE) + .addComponent(showImagesToggleButton)) + .addComponent(htmlScrollPane) + ); + layout.setVerticalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addComponent(showImagesToggleButton) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(htmlScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 71, Short.MAX_VALUE)) + ); + }// //GEN-END:initComponents + + private void showImagesToggleButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_showImagesToggleButtonActionPerformed + refresh(); + }//GEN-LAST:event_showImagesToggleButtonActionPerformed + + + // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JScrollPane htmlScrollPane; + private javax.swing.JTextPane 
htmlbodyTextPane; + private javax.swing.JToggleButton showImagesToggleButton; + // End of variables declaration//GEN-END:variables +} diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.form b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.form new file mode 100755 index 0000000000..a08d9e9b31 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.form @@ -0,0 +1,40 @@ + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java new file mode 100755 index 0000000000..54c69265b7 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java @@ -0,0 +1,130 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2019 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.contentviewers; + +import java.awt.Component; +import java.awt.Cursor; +import java.util.Arrays; +import java.util.List; +import java.util.logging.Level; +import org.openide.windows.WindowManager; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * A file content viewer for HTML files. 
+ */ +@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives +final class HtmlViewer extends javax.swing.JPanel implements FileTypeViewer { + + private static final long serialVersionUID = 1L; + private static final Logger logger = Logger.getLogger(HtmlViewer.class.getName()); + + private static final String[] SUPPORTED_MIMETYPES = new String[]{"text/html"}; + + private BlackboardArtifact artifact; + private AbstractFile abstractFile; + + /** + * Creates new form HtmlViewerPanel + */ + HtmlViewer() { + initComponents(); + } + + /** + * Retrieve the HTML text content from the supplied file. + * + * @param abstractFile The file to read. + * + * @return The text content of the file. + */ + private String getHtmlText(AbstractFile abstractFile) { + try { + int fileSize = (int) abstractFile.getSize(); + byte[] buffer = new byte[fileSize]; + abstractFile.read(buffer, 0, fileSize); + return new String(buffer); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Unable to read from file '%s' (id=%d).", + abstractFile.getName(), abstractFile.getId()), ex); + } + + return null; + } + + /** + * This method is called from within the constructor to initialize the form. + * WARNING: Do NOT modify this code. The content of this method is always + * regenerated by the Form Editor. 
+ */ + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + htmlPanel = new org.sleuthkit.autopsy.contentviewers.HtmlPanel(); + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addContainerGap() + .addComponent(htmlPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addContainerGap()) + ); + layout.setVerticalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(layout.createSequentialGroup() + .addContainerGap() + .addComponent(htmlPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addContainerGap()) + ); + }// //GEN-END:initComponents + + + // Variables declaration - do not modify//GEN-BEGIN:variables + private org.sleuthkit.autopsy.contentviewers.HtmlPanel htmlPanel; + // End of variables declaration//GEN-END:variables + + @Override + public List getSupportedMIMETypes() { + return Arrays.asList(SUPPORTED_MIMETYPES); + } + + @Override + public void setFile(AbstractFile file) { + WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + abstractFile = file; + htmlPanel.setHtmlText(getHtmlText(file)); + WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); + } + + @Override + public Component getComponent() { + return this; + } + + @Override + public void resetComponent() { + htmlPanel.reset(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.form b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.form index c1400964a9..1172483699 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.form +++ 
b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.form @@ -274,50 +274,17 @@ - - - - - - + - - - - - - + - - - - - - - - - - - - - - - - - - - - - - - - + diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java index ef631eeb02..bd66b35449 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017-2018 Basis Technology Corp. + * Copyright 2017-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -106,9 +106,12 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont attachmentsScrollPane.setViewportView(drp); msgbodyTabbedPane.setEnabledAt(ATTM_TAB_INDEX, true); - textAreas = Arrays.asList(headersTextArea, textbodyTextArea, htmlbodyTextPane, rtfbodyTextPane); + /* + * HTML tab uses the HtmlPanel instead of an internal text pane, so we + * use 'null' for that index. 
+ */ + textAreas = Arrays.asList(headersTextArea, textbodyTextArea, null, rtfbodyTextPane); - Utilities.configureTextPaneAsHtml(htmlbodyTextPane); Utilities.configureTextPaneAsRtf(rtfbodyTextPane); resetComponent(); @@ -150,9 +153,7 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont textbodyScrollPane = new javax.swing.JScrollPane(); textbodyTextArea = new javax.swing.JTextArea(); htmlPane = new javax.swing.JPanel(); - htmlScrollPane = new javax.swing.JScrollPane(); - htmlbodyTextPane = new javax.swing.JTextPane(); - showImagesToggleButton = new javax.swing.JToggleButton(); + htmlPanel = new org.sleuthkit.autopsy.contentviewers.HtmlPanel(); rtfbodyScrollPane = new javax.swing.JScrollPane(); rtfbodyTextPane = new javax.swing.JTextPane(); attachmentsPanel = new javax.swing.JPanel(); @@ -265,35 +266,15 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont msgbodyTabbedPane.addTab(org.openide.util.NbBundle.getMessage(MessageContentViewer.class, "MessageContentViewer.textbodyScrollPane.TabConstraints.tabTitle"), textbodyScrollPane); // NOI18N - htmlScrollPane.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS); - - htmlbodyTextPane.setEditable(false); - htmlScrollPane.setViewportView(htmlbodyTextPane); - - org.openide.awt.Mnemonics.setLocalizedText(showImagesToggleButton, "Show Images"); - showImagesToggleButton.addActionListener(new java.awt.event.ActionListener() { - public void actionPerformed(java.awt.event.ActionEvent evt) { - showImagesToggleButtonActionPerformed(evt); - } - }); - javax.swing.GroupLayout htmlPaneLayout = new javax.swing.GroupLayout(htmlPane); htmlPane.setLayout(htmlPaneLayout); htmlPaneLayout.setHorizontalGroup( htmlPaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(htmlScrollPane) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, htmlPaneLayout.createSequentialGroup() - .addContainerGap(533, 
Short.MAX_VALUE) - .addComponent(showImagesToggleButton) - .addGap(3, 3, 3)) + .addComponent(htmlPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 647, Short.MAX_VALUE) ); htmlPaneLayout.setVerticalGroup( htmlPaneLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(htmlPaneLayout.createSequentialGroup() - .addComponent(showImagesToggleButton) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(htmlScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 333, Short.MAX_VALUE) - .addGap(0, 0, 0)) + .addComponent(htmlPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 362, Short.MAX_VALUE) ); msgbodyTabbedPane.addTab(org.openide.util.NbBundle.getMessage(MessageContentViewer.class, "MessageContentViewer.htmlPane.TabConstraints.tabTitle"), htmlPane); // NOI18N @@ -358,26 +339,6 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont ); }// //GEN-END:initComponents - @NbBundle.Messages({ - "MessageContentViewer.showImagesToggleButton.hide.text=Hide Images", - "MessageContentViewer.showImagesToggleButton.text=Show Images"}) - private void showImagesToggleButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_showImagesToggleButtonActionPerformed - try { - String htmlText = getAttributeValueSafe(artifact, TSK_EMAIL_CONTENT_HTML); - if (false == htmlText.isEmpty()) { - if (showImagesToggleButton.isSelected()) { - showImagesToggleButton.setText(Bundle.MessageContentViewer_showImagesToggleButton_hide_text()); - this.htmlbodyTextPane.setText(wrapInHtmlBody(htmlText)); - } else { - showImagesToggleButton.setText(Bundle.MessageContentViewer_showImagesToggleButton_text()); - this.htmlbodyTextPane.setText(wrapInHtmlBody(cleanseHTML(htmlText))); - } - } - } catch (TskCoreException ex) { - LOGGER.log(Level.WARNING, "Failed to get attributes for email message.", ex); //NON-NLS - } - }//GEN-LAST:event_showImagesToggleButtonActionPerformed - private void 
viewInNewWindowButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewInNewWindowButtonActionPerformed new NewWindowViewAction("View in new window", drpExplorerManager.getSelectedNodes()[0]).actionPerformed(evt); }//GEN-LAST:event_viewInNewWindowButtonActionPerformed @@ -396,12 +357,10 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont private javax.swing.JScrollPane headersScrollPane; private javax.swing.JTextArea headersTextArea; private javax.swing.JPanel htmlPane; - private javax.swing.JScrollPane htmlScrollPane; - private javax.swing.JTextPane htmlbodyTextPane; + private org.sleuthkit.autopsy.contentviewers.HtmlPanel htmlPanel; private javax.swing.JTabbedPane msgbodyTabbedPane; private javax.swing.JScrollPane rtfbodyScrollPane; private javax.swing.JTextPane rtfbodyTextPane; - private javax.swing.JToggleButton showImagesToggleButton; private javax.swing.JLabel subjectLabel; private javax.swing.JLabel subjectText; private javax.swing.JScrollPane textbodyScrollPane; @@ -505,9 +464,9 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont headersTextArea.setText(""); rtfbodyTextPane.setText(""); - htmlbodyTextPane.setText(""); + htmlPanel.reset(); //DLG: htmlbodyTextPane.setText(""); textbodyTextArea.setText(""); - showImagesToggleButton.setEnabled(false); + //DLG: showImagesToggleButton.setEnabled(false); msgbodyTabbedPane.setEnabled(false); } @@ -567,12 +526,15 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont String attributeText = getAttributeValueSafe(artifact, type); if (index == HTML_TAB_INDEX && StringUtils.isNotBlank(attributeText)) { - //special case for HTML, we need to 'cleanse' it - attributeText = wrapInHtmlBody(cleanseHTML(attributeText)); + htmlPanel.setHtmlText(attributeText); + } else { + JTextComponent textComponent = textAreas.get(index); + if (textComponent != null) { + textComponent.setText(attributeText); + 
textComponent.setCaretPosition(0); //make sure we start at the top + } } - JTextComponent textComponent = textAreas.get(index); - textComponent.setText(attributeText); - textComponent.setCaretPosition(0); //make sure we start at the top + final boolean hasText = attributeText.length() > 0; msgbodyTabbedPane.setEnabledAt(index, hasText); @@ -613,9 +575,9 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont directionText.setEnabled(false); ccLabel.setEnabled(true); - showImagesToggleButton.setEnabled(true); - showImagesToggleButton.setText("Show Images"); - showImagesToggleButton.setSelected(false); + //DLG: showImagesToggleButton.setEnabled(true); + //DLG: showImagesToggleButton.setText("Show Images"); + //DLG: showImagesToggleButton.setSelected(false); try { this.fromText.setText(getAttributeValueSafe(artifact, TSK_EMAIL_FROM)); From 59ddc074daf2adddd28bb7faaf208dcf93678b88 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Tue, 26 Feb 2019 01:51:48 -0500 Subject: [PATCH 34/80] Removed developer messages. 
--- .../autopsy/contentviewers/MessageContentViewer.java | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java index bd66b35449..00c87b3417 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/MessageContentViewer.java @@ -464,9 +464,8 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont headersTextArea.setText(""); rtfbodyTextPane.setText(""); - htmlPanel.reset(); //DLG: htmlbodyTextPane.setText(""); + htmlPanel.reset(); textbodyTextArea.setText(""); - //DLG: showImagesToggleButton.setEnabled(false); msgbodyTabbedPane.setEnabled(false); } @@ -575,10 +574,6 @@ public class MessageContentViewer extends javax.swing.JPanel implements DataCont directionText.setEnabled(false); ccLabel.setEnabled(true); - //DLG: showImagesToggleButton.setEnabled(true); - //DLG: showImagesToggleButton.setText("Show Images"); - //DLG: showImagesToggleButton.setSelected(false); - try { this.fromText.setText(getAttributeValueSafe(artifact, TSK_EMAIL_FROM)); this.fromText.setToolTipText(getAttributeValueSafe(artifact, TSK_EMAIL_FROM)); From 00cec7d4785fed80fc41bd8aa80916c848ede368 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Tue, 26 Feb 2019 02:05:08 -0500 Subject: [PATCH 35/80] Addressed Codacy issues. 
--- Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java | 2 -- Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java | 4 ---- 2 files changed, 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java index 058848d00e..96badc44c3 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.contentviewers; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.coreutils.Logger; /** * A file content viewer for HTML files. @@ -30,7 +29,6 @@ import org.sleuthkit.autopsy.coreutils.Logger; final class HtmlPanel extends javax.swing.JPanel { private static final long serialVersionUID = 1L; - private static final Logger logger = Logger.getLogger(HtmlPanel.class.getName()); private String htmlText; diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java index 54c69265b7..2aa1e41ffd 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java @@ -26,7 +26,6 @@ import java.util.logging.Level; import org.openide.windows.WindowManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.TskCoreException; /** @@ -40,9 +39,6 @@ final class HtmlViewer extends javax.swing.JPanel implements FileTypeViewer { private static final String[] SUPPORTED_MIMETYPES = new String[]{"text/html"}; - private BlackboardArtifact artifact; - private AbstractFile abstractFile; - /** * Creates new form HtmlViewerPanel */ From 9009550a8b25d7dc2d7af197230820026aa225f9 Mon Sep 17 00:00:00 2001 From: William 
Schaefer Date: Tue, 26 Feb 2019 16:26:17 -0500 Subject: [PATCH 36/80] 4757 perform mime type filtering as part of file hash query --- .../AllInterCaseCommonAttributeSearcher.java | 9 +- .../CommonAttributeCaseSearchResults.java | 32 ++----- .../CommonAttributeCountSearchResults.java | 38 +------- .../InterCaseSearchResultsProcessor.java | 89 +++++++++++++------ ...ingleInterCaseCommonAttributeSearcher.java | 12 +-- 5 files changed, 86 insertions(+), 94 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java index e718c01041..e9088dbbb6 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java @@ -56,7 +56,6 @@ public class AllInterCaseCommonAttributeSearcher extends InterCaseCommonAttribut @Override public CommonAttributeCountSearchResults findMatchesByCount() throws TskCoreException, NoCurrentCaseException, SQLException, EamDbException { InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(corAttrType); - Map interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCount(Case.getCurrentCase()); Set mimeTypesToFilterOn = new HashSet<>(); if (isFilterByMedia()) { mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES); @@ -64,13 +63,14 @@ public class AllInterCaseCommonAttributeSearcher extends InterCaseCommonAttribut if (isFilterByDoc()) { mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES); } - return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType, mimeTypesToFilterOn); + Map interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn); + + return new CommonAttributeCountSearchResults(interCaseCommonFiles, 
this.frequencyPercentageThreshold, this.corAttrType); } @Override public CommonAttributeCaseSearchResults findMatchesByCase() throws TskCoreException, NoCurrentCaseException, SQLException, EamDbException { InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(corAttrType); - Map> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCase(Case.getCurrentCase()); Set mimeTypesToFilterOn = new HashSet<>(); if (isFilterByMedia()) { mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES); @@ -78,7 +78,8 @@ public class AllInterCaseCommonAttributeSearcher extends InterCaseCommonAttribut if (isFilterByDoc()) { mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES); } - return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType, mimeTypesToFilterOn); + Map> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCase(Case.getCurrentCase(), mimeTypesToFilterOn); + return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); } @NbBundle.Messages({ diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java index 1bd94f4a8a..317c5a26ab 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java @@ -33,7 +33,6 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNor import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; /** * Stores the results from the various types of common attribute searching @@ -55,11 +54,9 @@ final public class 
CommonAttributeCaseSearchResults { * common, value of 0 is disabled * @param resultType The type of Correlation Attribute being * searched for - * @param mimeTypesToFilterOn Set of mime types to include for intercase - * searches */ - CommonAttributeCaseSearchResults(Map> metadata, int percentageThreshold, CorrelationAttributeInstance.Type resultType, Set mimeTypesToFilterOn) { - this.caseNameToDataSources = filterMetadata(metadata, percentageThreshold, resultType.getId(), mimeTypesToFilterOn); + CommonAttributeCaseSearchResults(Map> metadata, int percentageThreshold, CorrelationAttributeInstance.Type resultType) { + this.caseNameToDataSources = filterMetadata(metadata, percentageThreshold, resultType.getId()); } /** @@ -71,7 +68,7 @@ final public class CommonAttributeCaseSearchResults { * common, value of 0 is disabled */ CommonAttributeCaseSearchResults(Map> metadata, int percentageThreshold) { - this.caseNameToDataSources = filterMetadata(metadata, percentageThreshold, CorrelationAttributeInstance.FILES_TYPE_ID, new HashSet<>()); + this.caseNameToDataSources = filterMetadata(metadata, percentageThreshold, CorrelationAttributeInstance.FILES_TYPE_ID); } /** @@ -114,7 +111,7 @@ final public class CommonAttributeCaseSearchResults { * * @return metadata */ - private Map> filterMetadata(Map> metadata, int percentageThreshold, int resultTypeId, Set mimeTypesToFilterOn) { + private Map> filterMetadata(Map> metadata, int percentageThreshold, int resultTypeId) { try { final String currentCaseName; try { @@ -134,7 +131,7 @@ final public class CommonAttributeCaseSearchResults { //Call countUniqueDataSources once to reduce the number of DB queries needed to get the frequencyPercentage Double uniqueCaseDataSourceTuples = EamDb.getInstance().getCountUniqueDataSources().doubleValue(); Map> filteredCaseNameToDataSourcesTree = new HashMap<>(); - Map valuesToKeepCurrentCase = getValuesToKeepFromCurrentCase(currentCaseDataSourceMap, attributeType, percentageThreshold, 
uniqueCaseDataSourceTuples, mimeTypesToFilterOn); + Map valuesToKeepCurrentCase = getValuesToKeepFromCurrentCase(currentCaseDataSourceMap, attributeType, percentageThreshold, uniqueCaseDataSourceTuples); for (Entry> mapOfDataSources : Collections.unmodifiableMap(metadata).entrySet()) { if (!mapOfDataSources.getKey().equals(currentCaseName)) { //rebuild the metadata structure with items from the current case substituted for their matches in other cases results we want to filter out removed @@ -169,14 +166,14 @@ final public class CommonAttributeCaseSearchResults { * * @throws EamDbException */ - private Map getValuesToKeepFromCurrentCase(Map dataSourceToValueList, CorrelationAttributeInstance.Type attributeType, int maximumPercentageThreshold, Double uniqueCaseDataSourceTuples, Set mimeTypesToFilterOn) throws EamDbException { + private Map getValuesToKeepFromCurrentCase(Map dataSourceToValueList, CorrelationAttributeInstance.Type attributeType, int maximumPercentageThreshold, Double uniqueCaseDataSourceTuples) throws EamDbException { Map valuesToKeep = new HashMap<>(); Set valuesToRemove = new HashSet<>(); for (Entry mapOfValueLists : Collections.unmodifiableMap(dataSourceToValueList).entrySet()) { for (CommonAttributeValue value : mapOfValueLists.getValue().getDelayedMetadataSet()) { if (valuesToRemove.contains(value.getValue())) { //do nothing this value will not be added - } else if (filterValue(attributeType, value, maximumPercentageThreshold, uniqueCaseDataSourceTuples, mimeTypesToFilterOn)) { + } else if (filterValue(attributeType, value, maximumPercentageThreshold, uniqueCaseDataSourceTuples)) { valuesToRemove.add(value.getValue()); } else { valuesToKeep.put(value.getValue(), value); @@ -234,20 +231,7 @@ final public class CommonAttributeCaseSearchResults { * * @throws EamDbException */ - private boolean filterValue(CorrelationAttributeInstance.Type attributeType, CommonAttributeValue value, int maximumPercentageThreshold, Double uniqueCaseDataSourceTuples, 
Set mimeTypesToInclude) throws EamDbException { - //Intracase common attribute searches will have been created with an empty mimeTypesToInclude list - //because when performing intra case search this filtering will have been done during the query of the case database - if (!mimeTypesToInclude.isEmpty()) { //only do the mime type filtering when mime types aren't empty - for (AbstractCommonAttributeInstance commonAttr : value.getInstances()) { - AbstractFile abstractFile = commonAttr.getAbstractFile(); - if (abstractFile != null) { - String mimeType = abstractFile.getMIMEType(); - if (mimeType != null && !mimeTypesToInclude.contains(mimeType)) { - return true; - } - } - } - } + private boolean filterValue(CorrelationAttributeInstance.Type attributeType, CommonAttributeValue value, int maximumPercentageThreshold, Double uniqueCaseDataSourceTuples) throws EamDbException { if (maximumPercentageThreshold != 0) { //only do the frequency filtering when a max % was set try { Double uniqueTypeValueTuples = EamDb.getInstance().getCountUniqueCaseDataSourceTuplesHavingTypeValue( diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java index 15de0dd09a..4845218680 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java @@ -45,7 +45,6 @@ final public class CommonAttributeCountSearchResults { // maps instance count to list of attribute values. 
private final Map instanceCountToAttributeValues; - private final Set mimeTypesToInclude; private final int percentageThreshold; private final int resultTypeId; @@ -58,15 +57,13 @@ final public class CommonAttributeCountSearchResults { * common, value of 0 is disabled * @param resultType The type of Correlation Attribute being * searched for - * @param mimeTypesToFilterOn Set of mime types to include for intercase - * searches + */ - CommonAttributeCountSearchResults(Map metadata, int percentageThreshold, CorrelationAttributeInstance.Type resultType, Set mimeTypesToFilterOn) { + CommonAttributeCountSearchResults(Map metadata, int percentageThreshold, CorrelationAttributeInstance.Type resultType) { //wrap in a new object in case any client code has used an unmodifiable collection this.instanceCountToAttributeValues = new HashMap<>(metadata); this.percentageThreshold = percentageThreshold; this.resultTypeId = resultType.getId(); - this.mimeTypesToInclude = mimeTypesToFilterOn; } /** @@ -82,7 +79,6 @@ final public class CommonAttributeCountSearchResults { this.instanceCountToAttributeValues = new HashMap<>(metadata); this.percentageThreshold = percentageThreshold; this.resultTypeId = CorrelationAttributeInstance.FILES_TYPE_ID; - this.mimeTypesToInclude = new HashSet<>(); //don't filter on mimetypes } /** @@ -153,35 +149,7 @@ final public class CommonAttributeCountSearchResults { final CommonAttributeValueList values = listOfValues.getValue(); for (CommonAttributeValue value : values.getDelayedMetadataSet()) { // Need the real metadata - - //Intracase common attribute searches will have been created with an empty mimeTypesToInclude list - //because when performing intra case search this filtering will have been done during the query of the case database - boolean mimeTypeToRemove = false; //allow code to be more efficient by not attempting to remove the same value multiple times - if (!mimeTypesToInclude.isEmpty()) { //only do the mime type filtering when mime types 
aren't empty - for (AbstractCommonAttributeInstance commonAttr : value.getInstances()) { - AbstractFile abstractFile = commonAttr.getAbstractFile(); - if (abstractFile != null) { - String mimeType = commonAttr.getAbstractFile().getMIMEType(); - if (mimeType != null && !mimeTypesToInclude.contains(mimeType)) { - if (itemsToRemove.containsKey(key)) { - itemsToRemove.get(key).add(value); - } else { - List toRemove = new ArrayList<>(); - toRemove.add(value); - itemsToRemove.put(key, toRemove); - } - //value will be removed as the mime type existed and was not in the set to be included - //because value is removed this value does not need to be checked further - mimeTypeToRemove = true; - break; - } - } - if (mimeTypeToRemove) { - break; - } - } - } - if (!mimeTypeToRemove && maximumPercentageThreshold != 0) { //only do the frequency filtering when a max % was set + if (maximumPercentageThreshold != 0) { //only do the frequency filtering when a max % was set try { Double uniqueTypeValueTuples = eamDb.getCountUniqueCaseDataSourceTuplesHavingTypeValue( attributeType, value.getValue()).doubleValue(); diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 2b4677cfe4..9b3500971f 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -30,6 +30,8 @@ import java.util.Set; import java.util.List; import java.util.Map; import java.util.logging.Level; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; @@ -42,8 +44,13 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import 
org.sleuthkit.autopsy.centralrepository.datamodel.InstanceTableCallback; import org.sleuthkit.autopsy.commonpropertiessearch.AbstractCommonAttributeInstance.NODE_TYPE; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.CaseDbAccessManager; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.HashUtility; +import org.sleuthkit.datamodel.TskCoreException; /** * Used to process and return CorrelationCase values from the EamDB for @@ -51,13 +58,12 @@ import org.sleuthkit.datamodel.HashUtility; */ final class InterCaseSearchResultsProcessor { + private static final Logger LOGGER = Logger.getLogger(CommonAttributePanel.class.getName()); /** * The CorrelationAttributeInstance.Type this Processor will query on */ private final Type correlationType; - private static final Logger LOGGER = Logger.getLogger(CommonAttributePanel.class.getName()); - /** * The initial CorrelationAttributeInstance ids lookup query. 
*/ @@ -112,6 +118,15 @@ final class InterCaseSearchResultsProcessor { return null; } + private String getFileQuery(Set mimeTypesToFilterOn) throws EamDbException { + String query; + query = "md5 as value from tsk_files where known!=" + TskData.FileKnown.KNOWN.getFileKnownValue() + " AND md5 IS NOT NULL"; + if (!mimeTypesToFilterOn.isEmpty()) { + query = query + " AND mime_type IS NOT NULL AND mime_type IN ('" + String.join("', '", mimeTypesToFilterOn) + "')"; + } + return query; + } + /** * Given the current case, fins all intercase common files from the EamDb * and builds maps of case name to maps of data source name to @@ -122,21 +137,25 @@ final class InterCaseSearchResultsProcessor { * @return map of Case name to Maps of Datasources and their * CommonAttributeValueLists */ - Map> findInterCaseValuesByCase(Case currentCase) { + Map> findInterCaseValuesByCase(Case currentCase, Set mimeTypesToFilterOn) { try { EamDb dbManager = EamDb.getInstance(); - int caseId = dbManager.getCase(currentCase).getID(); InterCaseByCaseCallback instancetableCallback = new InterCaseByCaseCallback(caseId); - dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, - TskData.FileKnown.KNOWN.getFileKnownValue()), - instancetableCallback); - + if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { + currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); + } else { + dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + TskData.FileKnown.KNOWN.getFileKnownValue()), + instancetableCallback); + } return instancetableCallback.getInstanceCollatedCommonFiles(); } catch (EamDbException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); + } catch (TskCoreException ex) { + Exceptions.printStackTrace(ex); } return new HashMap<>(); } @@ -147,21 +166,26 @@ final class 
InterCaseSearchResultsProcessor { * * @param currentCase The current TSK Case. */ - Map findInterCaseValuesByCount(Case currentCase) { + Map findInterCaseValuesByCount(Case currentCase, Set mimeTypesToFilterOn) { try { EamDb dbManager = EamDb.getInstance(); int caseId = dbManager.getCase(currentCase).getID(); InterCaseByCountCallback instancetableCallback = new InterCaseByCountCallback(caseId); - dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, - TskData.FileKnown.KNOWN.getFileKnownValue()), - instancetableCallback); - + if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { + currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); + } else { + dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + TskData.FileKnown.KNOWN.getFileKnownValue()), + instancetableCallback); + } return instancetableCallback.getInstanceCollatedCommonFiles(); } catch (EamDbException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); + } catch (TskCoreException ex) { + Exceptions.printStackTrace(ex); } return new HashMap<>(); } @@ -174,17 +198,24 @@ final class InterCaseSearchResultsProcessor { * @param currentCase The current TSK Case. * @param singleCase The case of interest. Matches must exist in this case. 
*/ - Map findSingleInterCaseValuesByCount(Case currentCase, CorrelationCase singleCase) { + Map findSingleInterCaseValuesByCount(Case currentCase, Set mimeTypesToFilterOn, CorrelationCase singleCase) { try { EamDb dbManager = EamDb.getInstance(); int caseId = dbManager.getCase(currentCase).getID(); int targetCaseId = singleCase.getID(); InterCaseByCountCallback instancetableCallback = new InterCaseByCountCallback(caseId, targetCaseId); - dbManager.processInstanceTableWhere(correlationType, String.format(singleInterCaseWhereClause, caseId, - TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); + if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { + currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); + } else { + dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + TskData.FileKnown.KNOWN.getFileKnownValue()), + instancetableCallback); + } return instancetableCallback.getInstanceCollatedCommonFiles(); } catch (EamDbException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); + } catch (TskCoreException ex) { + Exceptions.printStackTrace(ex); } return new HashMap<>(); } @@ -202,30 +233,36 @@ final class InterCaseSearchResultsProcessor { * @param currentCase The current TSK Case. * @param singleCase The case of interest. Matches must exist in this case. 
*/ - Map> findSingleInterCaseValuesByCase(Case currentCase, CorrelationCase singleCase) { + Map> findSingleInterCaseValuesByCase(Case currentCase, Set mimeTypesToFilterOn, CorrelationCase singleCase) { try { EamDb dbManager = EamDb.getInstance(); int caseId = dbManager.getCase(currentCase).getID(); int targetCaseId = singleCase.getID(); InterCaseByCaseCallback instancetableCallback = new InterCaseByCaseCallback(caseId, targetCaseId); - dbManager.processInstanceTableWhere(correlationType, String.format(singleInterCaseWhereClause, caseId, - TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); - + if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { + currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); + } else { + dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + TskData.FileKnown.KNOWN.getFileKnownValue()), + instancetableCallback); + } return instancetableCallback.getInstanceCollatedCommonFiles(); } catch (EamDbException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); + } catch (TskCoreException ex) { + Exceptions.printStackTrace(ex); } return new HashMap<>(); } /** * Callback to use with findInterCaseValuesByCount which generates a list of - * md5s for common files search + * values for common property search */ - private class InterCaseByCountCallback implements InstanceTableCallback { + private class InterCaseByCountCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback, InstanceTableCallback { - final Map instanceCollatedCommonFiles = new HashMap<>(); + private final Map instanceCollatedCommonFiles = new HashMap<>(); private final int caseID; private final int targetCase; @@ -298,12 +335,12 @@ final class InterCaseSearchResultsProcessor { /** * Callback to use with findInterCaseValuesByCount which generates a list of - * md5s for common files search 
+ * values for common property search */ - private class InterCaseByCaseCallback implements InstanceTableCallback { + private class InterCaseByCaseCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback, InstanceTableCallback { private static final int VALUE_BATCH_SIZE = 500; - final Map> caseCollatedDataSourceCollections = new HashMap<>(); + private final Map> caseCollatedDataSourceCollections = new HashMap<>(); private final int caseID; private final int targetCase; diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java index 8129acaf16..e098c1ea3d 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java @@ -76,15 +76,16 @@ public class SingleInterCaseCommonAttributeSearcher extends InterCaseCommonAttri CorrelationCase correlationCase = this.getCorrelationCaseFromId(this.corrleationCaseId); this.correlationCaseName = correlationCase.getDisplayName(); InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(this.corAttrType); - Map interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCount(Case.getCurrentCase(), correlationCase); - Set mimeTypesToFilterOn = new HashSet<>(); + Set mimeTypesToFilterOn = new HashSet<>(); if (isFilterByMedia()) { mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES); } if (isFilterByDoc()) { mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES); } - return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType, mimeTypesToFilterOn); + Map interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn, correlationCase); + + return new 
CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); } /** @@ -104,7 +105,6 @@ public class SingleInterCaseCommonAttributeSearcher extends InterCaseCommonAttri CorrelationCase correlationCase = this.getCorrelationCaseFromId(this.corrleationCaseId); this.correlationCaseName = correlationCase.getDisplayName(); InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(this.corAttrType); - Map> interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCase(Case.getCurrentCase(), correlationCase); Set mimeTypesToFilterOn = new HashSet<>(); if (isFilterByMedia()) { mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES); @@ -112,7 +112,9 @@ public class SingleInterCaseCommonAttributeSearcher extends InterCaseCommonAttri if (isFilterByDoc()) { mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES); } - return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType, mimeTypesToFilterOn); + Map> interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCase(Case.getCurrentCase(), mimeTypesToFilterOn, correlationCase); + + return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); } @NbBundle.Messages({ From 5e7fe17d072acf46c84e689fade9b10117bb7f2d Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 26 Feb 2019 17:33:59 -0500 Subject: [PATCH 37/80] 4757 first pass at clean up for common property search changes --- .../datamodel/AbstractSqlEamDb.java | 93 +++++++------- .../centralrepository/datamodel/EamDb.java | 68 +++++++---- .../datamodel/SqliteEamDb.java | 27 ----- .../AllInterCaseCommonAttributeSearcher.java | 3 +- .../CommonAttributeCaseSearchResults.java | 7 +- .../CommonAttributeCountSearchResults.java | 7 +- .../CommonAttributeValueList.java | 57 ++++----- .../InstanceDataSourceNode.java | 2 +- .../InterCaseSearchResultsProcessor.java | 114 
++++++++---------- ...ingleInterCaseCommonAttributeSearcher.java | 4 +- 10 files changed, 167 insertions(+), 215 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 26af02e7e2..520908f125 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -41,7 +41,6 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; -import org.openide.util.Exceptions; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import static org.sleuthkit.autopsy.centralrepository.datamodel.EamDbUtil.updateSchemaVersion; @@ -1054,38 +1053,43 @@ abstract class AbstractSqlEamDb implements EamDb { } } - /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. - * - * @param aType The type of the artifact - * @param value The correlation value - * - * @return List of artifact instances for a given type/value - * - * @throws EamDbException - */ @Override public List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { return getArtifactInstancesByTypeValues(aType, Arrays.asList(value)); } - - /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. 
- * - * @param aType The type of the artifact - * @param value The correlation value - * - * @return List of artifact instances for a given type/value - * - * @throws EamDbException - */ + + @Override public List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException { return getArtifactInstances(prepareGetInstancesSql(aType, values), aType); } - + + @Override + public List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { + String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); + String sql + = " and " + + tableName + + ".case_id in ('"; + StringBuilder inValuesBuilder = new StringBuilder(prepareGetInstancesSql(aType, values)); + inValuesBuilder.append(sql); + inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '"))); + inValuesBuilder.append("')"); + return getArtifactInstances(inValuesBuilder.toString(), aType); + } + + /** + * Get the select statement for retrieving correlation attribute instances + * from the CR for a given type with values matching the specified values + * + * @param aType The type of the artifact + * @param values The list of correlation values to get + * CorrelationAttributeInstances for + * + * @return the select statement as a String + * + * @throws CorrelationAttributeNormalizationException + */ private String prepareGetInstancesSql(CorrelationAttributeInstance.Type aType, List values) throws CorrelationAttributeNormalizationException { String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); String sql @@ -1118,6 +1122,20 @@ abstract class AbstractSqlEamDb implements EamDb { return inValuesBuilder.toString(); } + /** + * Retrieves eamArtifact instances from the database that are associated + * with the eamArtifactType and eamArtifactValues of 
the given eamArtifact. + * + * @param aType The type of the artifact + * @param values The list of correlation values to get + * CorrelationAttributeInstances for + * + * @return List of artifact instances for a given type with the specified + * values + * + * @throws CorrelationAttributeNormalizationException + * @throws EamDbException + */ private List getArtifactInstances(String sql, CorrelationAttributeInstance.Type aType) throws CorrelationAttributeNormalizationException, EamDbException { Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -1141,31 +1159,6 @@ abstract class AbstractSqlEamDb implements EamDb { return artifactInstances; } - /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. - * - * @param aType The type of the artifact - * @param value The correlation value - * - * @return List of artifact instances for a given type/value - * - * @throws EamDbException - */ - @Override - public List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { - String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); - String sql - = " and " - + tableName - + ".case_id in ('"; - StringBuilder inValuesBuilder = new StringBuilder(prepareGetInstancesSql(aType, values)); - inValuesBuilder.append(sql); - inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '"))); - inValuesBuilder.append("')"); - return getArtifactInstances(inValuesBuilder.toString(), aType); - } - /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java index ce5052d0a9..1050c01ffc 100755 --- 
a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java @@ -24,7 +24,6 @@ import java.util.Set; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; -import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber; /** * Main interface for interacting with the database @@ -200,27 +199,29 @@ public interface EamDb { * Creates new Data Source in the database * * @param eamDataSource the data source to add - * - * @return - A CorrelationDataSource object with data source's central repository id + * + * @return - A CorrelationDataSource object with data source's central + * repository id */ CorrelationDataSource newDataSource(CorrelationDataSource eamDataSource) throws EamDbException; - + /** * Updates the MD5 hash value in an existing data source in the database. * * @param eamDataSource The data source to update */ void updateDataSourceMd5Hash(CorrelationDataSource eamDataSource) throws EamDbException; - + /** * Updates the SHA-1 hash value in an existing data source in the database. * * @param eamDataSource The data source to update */ void updateDataSourceSha1Hash(CorrelationDataSource eamDataSource) throws EamDbException; - + /** - * Updates the SHA-256 hash value in an existing data source in the database. + * Updates the SHA-256 hash value in an existing data source in the + * database. 
* * @param eamDataSource The data source to update */ @@ -257,14 +258,14 @@ public interface EamDb { /** * Changes the name of a data source in the DB - * - * @param eamDataSource The data source - * @param newName The new name - * - * @throws EamDbException + * + * @param eamDataSource The data source + * @param newName The new name + * + * @throws EamDbException */ void updateDataSourceName(CorrelationDataSource eamDataSource, String newName) throws EamDbException; - + /** * Inserts new Artifact(s) into the database. Should add associated Case and * Data Source first. @@ -275,12 +276,17 @@ public interface EamDb { /** * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. + * with the eamArtifactType and eamArtifactValues of the given eamArtifact. * - * @param aType EamArtifact.Type to search for - * @param value Value to search for + * @param aType EamArtifact.Type to search for + * @param values The list of correlation values to get + * CorrelationAttributeInstances for * - * @return List of artifact instances for a given type/value + * @return List of artifact instances for a given type with the specified + * values + * + * @throws CorrelationAttributeNormalizationException + * @throws EamDbException */ List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException; @@ -288,23 +294,35 @@ public interface EamDb { * Retrieves eamArtifact instances from the database that are associated * with the eamArtifactType and eamArtifactValue of the given eamArtifact. 
* - * @param aType EamArtifact.Type to search for - * @param value Value to search for + * @param aType The type of the artifact + * @param value The correlation value * * @return List of artifact instances for a given type/value + * + * @throws CorrelationAttributeNormalizationException + * @throws EamDbException */ List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException; - + /** * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. + * with the eamArtifactType and eamArtifactValues of the given eamArtifact + * for the specified cases. * - * @param aType EamArtifact.Type to search for - * @param value Value to search for + * @param aType The type of the artifact + * @param values The list of correlation values to get + * CorrelationAttributeInstances for + * @param caseIds The list of central repository case ids to get + * CorrelationAttributeInstances for * - * @return List of artifact instances for a given type/value + * @return List of artifact instances for a given type with the specified + * values for the specified cases + * + * @throws CorrelationAttributeNormalizationException + * @throws EamDbException */ List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException; + /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath @@ -362,7 +380,7 @@ public interface EamDb { * Retrieves number of eamArtifact instances in the database that are * associated with the given data source. 
* - * @param correlationDataSource Data source to search for + * @param correlationDataSource Data source to search for * * @return Number of artifact instances having caseDisplayName and * dataSource diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index aa02a57839..163d747a4f 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -513,15 +513,6 @@ final class SqliteEamDb extends AbstractSqlEamDb { } } - /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. - * - * @param aType The type of the artifact - * @param value The correlation value - * - * @return List of artifact instances for a given type/value - */ @Override public List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { try { @@ -532,15 +523,6 @@ final class SqliteEamDb extends AbstractSqlEamDb { } } - /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. - * - * @param aType The type of the artifact - * @param value The correlation value - * - * @return List of artifact instances for a given type/value - */ @Override public List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException { try { @@ -551,15 +533,6 @@ final class SqliteEamDb extends AbstractSqlEamDb { } } - /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. 
- * - * @param aType The type of the artifact - * @param value The correlation value - * - * @return List of artifact instances for a given type/value - */ @Override public List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { try { diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java index e9088dbbb6..ee9c70211f 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/AllInterCaseCommonAttributeSearcher.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -64,7 +64,6 @@ public class AllInterCaseCommonAttributeSearcher extends InterCaseCommonAttribut mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES); } Map interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn); - return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); } diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java index 317c5a26ab..7b927e4e12 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2019 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -107,7 +107,6 @@ final public class CommonAttributeCaseSearchResults { * not be more common than * @param resultTypeId the ID of the result type contained in the * metadata - * @param mimeTypesToFilterOn the mimetypes to include in our results * * @return metadata */ @@ -121,7 +120,7 @@ final public class CommonAttributeCaseSearchResults { } Map currentCaseDataSourceMap = metadata.get(currentCaseName); if (currentCaseDataSourceMap == null) { - throw new EamDbException("No data for current case found in results, indicating there are no results and nothing will be filtered"); + return null; } CorrelationAttributeInstance.Type attributeType = CorrelationAttributeInstance .getDefaultCorrelationTypes() @@ -159,7 +158,6 @@ final public class CommonAttributeCaseSearchResults { * should not be more common than * @param uniqueCaseDataSourceTuples the number of unique data sources in * the CR - * @param mimeTypesToFilterOn the mimetypes to include in our results * * @return a map of correlation value to CommonAttributeValue for results * from the current case @@ -223,7 +221,6 @@ final public class CommonAttributeCaseSearchResults { * should not be more common than * @param uniqueCaseDataSourceTuples the number of unique data sources in * the CR - * @param mimeTypesToInclude the mimetypes to include in our results * * @return true if the value should be filtered and removed from what is * shown to the user, false if the value should not be removed and diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java index 4845218680..38da9bec48 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java +++ 
b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,18 +22,15 @@ package org.sleuthkit.autopsy.commonpropertiessearch; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Set; import java.util.logging.Level; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; /** * Stores the results from the various types of common attribute searching @@ -194,7 +191,7 @@ final public class CommonAttributeCountSearchResults { int count = 0; for (CommonAttributeValueList data : this.instanceCountToAttributeValues.values()) { - for (CommonAttributeValue md5 : data.getDelayedMetadataList()) { + for (CommonAttributeValue md5 : data.getDelayedMetadataSet()) { count += md5.getInstanceCount(); } } diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java index 50f29dff13..134d750ace 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java @@ -1,16 +1,16 @@ /* - * + * * Autopsy Forensic Browser - * - * Copyright 2018 Basis Technology Corp. 
+ * + * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -36,10 +36,10 @@ final public class CommonAttributeValueList { * The list of value nodes, which begins empty. */ private final List metadataList; - + /** - * The backing list of value nodes, which will be dynamically loaded - * when requested. + * The backing list of value nodes, which will be dynamically loaded when + * requested. */ private final List delayedMetadataList; @@ -60,44 +60,38 @@ final public class CommonAttributeValueList { } /** - * Get the list of value nodes. Will be empty if - * displayDelayedMetadata() has not been called for the - * parent InstanceCountNode + * Get the list of value nodes. Will be empty if displayDelayedMetadata() + * has not been called for the parent InstanceCountNode + * * @return metadataList the list of nodes */ public List getMetadataList() { return Collections.unmodifiableList(this.metadataList); } - - public Set getMetadataSet() { - return new HashSet<>(this.metadataList); - } - + /** - * Get the delayed list of value nodes. Only use for - * determining how many CommonAttributeValues - * actually exist in the list. - * @return metadataList the list of nodes + * Get the delayed set of value nodes. Only use for determining which values and how many + * CommonAttributeValues actually exist in the list. 
+ * + * @return metadataList the set of nodes */ - List getDelayedMetadataList() { - return Collections.unmodifiableList(this.delayedMetadataList); - } - - Set getDelayedMetadataSet() { + Set getDelayedMetadataSet() { + //Allows nodes to be de-duped return new HashSet<>(this.delayedMetadataList); } - + void removeMetaData(CommonAttributeValue commonVal) { this.delayedMetadataList.remove(commonVal); } - + /** - * Return the size of the backing list, in case - * displayDelayedMetadata() has not be called yet. + * Return the size of the backing list, in case displayDelayedMetadata() has + * not be called yet. + * * @return int the number of matches for this value */ int getCommonAttributeListSize() { - return this.delayedMetadataList.size(); + return this.delayedMetadataList.size(); } /** @@ -113,6 +107,7 @@ final public class CommonAttributeValueList { /** * A a value node to the list, to be loaded later. + * * @param metadata the node to add */ void addMetadataToList(CommonAttributeValue metadata) { diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java index 28c08e3c78..fe6b507f7d 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2019 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 9b3500971f..941e95bf6b 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -30,9 +30,6 @@ import java.util.Set; import java.util.List; import java.util.Map; import java.util.logging.Level; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type; @@ -44,9 +41,6 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import org.sleuthkit.autopsy.centralrepository.datamodel.InstanceTableCallback; import org.sleuthkit.autopsy.commonpropertiessearch.AbstractCommonAttributeInstance.NODE_TYPE; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.CaseDbAccessManager; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.HashUtility; @@ -59,21 +53,12 @@ import org.sleuthkit.datamodel.TskCoreException; final class InterCaseSearchResultsProcessor { private static final Logger LOGGER = 
Logger.getLogger(CommonAttributePanel.class.getName()); + private static final String INTER_CASE_WHERE_CLAUSE = "case_id=%s AND (known_status !=%s OR known_status IS NULL)"; //NON-NLS /** * The CorrelationAttributeInstance.Type this Processor will query on */ private final Type correlationType; - /** - * The initial CorrelationAttributeInstance ids lookup query. - */ - private final String interCaseWhereClause; - - /** - * The single CorrelationAttributeInstance object retrieval query - */ - private final String singleInterCaseWhereClause; - /** * Used in the InterCaseCommonAttributeSearchers to find common attribute * instances and generate nodes at the UI level. @@ -83,16 +68,6 @@ final class InterCaseSearchResultsProcessor { */ InterCaseSearchResultsProcessor(CorrelationAttributeInstance.Type theType) { this.correlationType = theType; - interCaseWhereClause = getInterCaseWhereClause(); - singleInterCaseWhereClause = getSingleInterCaseWhereClause(); - } - - private String getInterCaseWhereClause() { - return "case_id=%s AND (known_status !=%s OR known_status IS NULL)"; - } - - private String getSingleInterCaseWhereClause() { - return "case_id=%s AND (known_status !=%s OR known_status IS NULL)"; } /** @@ -118,6 +93,17 @@ final class InterCaseSearchResultsProcessor { return null; } + /** + * Get the portion of the select query which will get md5 values for files + * from the current case which are potentially being correlated on. 
+ * + * @param mimeTypesToFilterOn the set of mime types to filter on + * + * @return the portion of a query which follows the SELECT keyword for + * finding MD5s which we are correlating on + * + * @throws EamDbException + */ private String getFileQuery(Set mimeTypesToFilterOn) throws EamDbException { String query; query = "md5 as value from tsk_files where known!=" + TskData.FileKnown.KNOWN.getFileKnownValue() + " AND md5 IS NOT NULL"; @@ -132,7 +118,8 @@ final class InterCaseSearchResultsProcessor { * and builds maps of case name to maps of data source name to * CommonAttributeValueList. * - * @param currentCase The current TSK Case. + * @param currentCase The current TSK Case. + * @param mimeTypesToFilterOn the set of mime types to filter on * * @return map of Case name to Maps of Datasources and their * CommonAttributeValueLists @@ -146,25 +133,26 @@ final class InterCaseSearchResultsProcessor { if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); } else { - dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + dbManager.processInstanceTableWhere(correlationType, String.format(INTER_CASE_WHERE_CLAUSE, caseId, TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); } return instancetableCallback.getInstanceCollatedCommonFiles(); - } catch (EamDbException ex) { + } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); - } catch (TskCoreException ex) { - Exceptions.printStackTrace(ex); - } + } return new HashMap<>(); } /** * Given the current case, fins all intercase common files from the EamDb - * and builds maps of obj id to md5 and case. + * and builds maps of obj id to value and case. * - * @param currentCase The current TSK Case. + * @param currentCase The current TSK Case. 
+ * @param mimeTypesToFilterOn the set of mime types to filter on + * + * @return map of number of instances to CommonAttributeValueLists */ Map findInterCaseValuesByCount(Case currentCase, Set mimeTypesToFilterOn) { try { @@ -176,27 +164,29 @@ final class InterCaseSearchResultsProcessor { if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); } else { - dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + dbManager.processInstanceTableWhere(correlationType, String.format(INTER_CASE_WHERE_CLAUSE, caseId, TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); } return instancetableCallback.getInstanceCollatedCommonFiles(); - } catch (EamDbException ex) { + } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); - } catch (TskCoreException ex) { - Exceptions.printStackTrace(ex); - } + } return new HashMap<>(); } /** * Given the current case, and a specific case of interest, finds common * files which exist between cases from the EamDb. Builds maps of obj id to - * md5 and case. + * value and case. * - * @param currentCase The current TSK Case. - * @param singleCase The case of interest. Matches must exist in this case. + * @param currentCase The current TSK Case. + * @param mimeTypesToFilterOn the set of mime types to filter on + * @param singleCase The case of interest. Matches must exist in + * this case. 
+ * + * @return map of number of instances to CommonAttributeValueLists */ Map findSingleInterCaseValuesByCount(Case currentCase, Set mimeTypesToFilterOn, CorrelationCase singleCase) { try { @@ -207,15 +197,13 @@ final class InterCaseSearchResultsProcessor { if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); } else { - dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + dbManager.processInstanceTableWhere(correlationType, String.format(INTER_CASE_WHERE_CLAUSE, caseId, TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); } return instancetableCallback.getInstanceCollatedCommonFiles(); - } catch (EamDbException ex) { + } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); - } catch (TskCoreException ex) { - Exceptions.printStackTrace(ex); } return new HashMap<>(); } @@ -225,13 +213,13 @@ final class InterCaseSearchResultsProcessor { * files which exist between cases from the EamDb. Builds map of case name * to maps of data source name to CommonAttributeValueList. * - * @param currentCase The current TSK Case. + * @param currentCase The current TSK Case. + * @param mimeTypesToFilterOn the set of mime types to filter on + * @param singleCase The case of interest. Matches must exist in + * this case. * * @return map of Case name to Maps of Datasources and their * CommonAttributeValueLists - * - * @param currentCase The current TSK Case. - * @param singleCase The case of interest. Matches must exist in this case. 
*/ Map> findSingleInterCaseValuesByCase(Case currentCase, Set mimeTypesToFilterOn, CorrelationCase singleCase) { try { @@ -243,16 +231,14 @@ final class InterCaseSearchResultsProcessor { if (correlationType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) { currentCase.getSleuthkitCase().getCaseDbAccessManager().select(getFileQuery(mimeTypesToFilterOn), instancetableCallback); } else { - dbManager.processInstanceTableWhere(correlationType, String.format(interCaseWhereClause, caseId, + dbManager.processInstanceTableWhere(correlationType, String.format(INTER_CASE_WHERE_CLAUSE, caseId, TskData.FileKnown.KNOWN.getFileKnownValue()), instancetableCallback); } return instancetableCallback.getInstanceCollatedCommonFiles(); - } catch (EamDbException ex) { + } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); - } catch (TskCoreException ex) { - Exceptions.printStackTrace(ex); - } + } return new HashMap<>(); } @@ -319,13 +305,9 @@ final class InterCaseSearchResultsProcessor { } } } - } catch (SQLException ex) { + } catch (SQLException | EamDbException | CorrelationAttributeNormalizationException ex) { LOGGER.log(Level.WARNING, "Error getting artifact instances from database.", ex); // NON-NLS - } catch (EamDbException ex) { - Exceptions.printStackTrace(ex); - } catch (CorrelationAttributeNormalizationException ex) { - Exceptions.printStackTrace(ex); - } + } } Map getInstanceCollatedCommonFiles() { @@ -334,7 +316,7 @@ final class InterCaseSearchResultsProcessor { } /** - * Callback to use with findInterCaseValuesByCount which generates a list of + * Callback to use with findInterCaseValuesByCase which generates a map of maps of * values for common property search */ private class InterCaseByCaseCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback, InstanceTableCallback { @@ -401,11 +383,9 @@ final class InterCaseSearchResultsProcessor { } } } - } catch (EamDbException | 
SQLException ex) { + } catch (EamDbException | SQLException | CorrelationAttributeNormalizationException ex) { LOGGER.log(Level.WARNING, "Error getting artifact instances from database.", ex); // NON-NLS - } catch (CorrelationAttributeNormalizationException ex) { - Exceptions.printStackTrace(ex); - } + } } Map> getInstanceCollatedCommonFiles() { diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java index e098c1ea3d..8d0cca8412 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/SingleInterCaseCommonAttributeSearcher.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -76,7 +76,7 @@ public class SingleInterCaseCommonAttributeSearcher extends InterCaseCommonAttri CorrelationCase correlationCase = this.getCorrelationCaseFromId(this.corrleationCaseId); this.correlationCaseName = correlationCase.getDisplayName(); InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(this.corAttrType); - Set mimeTypesToFilterOn = new HashSet<>(); + Set mimeTypesToFilterOn = new HashSet<>(); if (isFilterByMedia()) { mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES); } From 8c6d035d49cca8d83a53f9ab359f33cf8f3ac83f Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 26 Feb 2019 17:36:42 -0500 Subject: [PATCH 38/80] 4757 auto format of modified files --- .../datamodel/AbstractSqlEamDb.java | 5 ++-- .../datamodel/SqliteEamDb.java | 29 ++++++++++--------- .../CommonAttributeCountSearchResults.java | 2 +- .../CommonAttributeValueList.java | 4 +-- .../InstanceDataSourceNode.java | 3 +- 
.../InterCaseSearchResultsProcessor.java | 14 ++++----- 6 files changed, 28 insertions(+), 29 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 520908f125..df30f3f5f8 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -1058,12 +1058,11 @@ abstract class AbstractSqlEamDb implements EamDb { return getArtifactInstancesByTypeValues(aType, Arrays.asList(value)); } - @Override public List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException { return getArtifactInstances(prepareGetInstancesSql(aType, values), aType); } - + @Override public List getArtifactInstancesByTypeValuesAndCases(CorrelationAttributeInstance.Type aType, List values, List caseIds) throws EamDbException, CorrelationAttributeNormalizationException { String tableName = EamDbUtil.correlationTypeToInstanceTableName(aType); @@ -1077,7 +1076,7 @@ abstract class AbstractSqlEamDb implements EamDb { inValuesBuilder.append("')"); return getArtifactInstances(inValuesBuilder.toString(), aType); } - + /** * Get the select statement for retrieving correlation attribute instances * from the CR for a given type with values matching the specified values diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index 163d747a4f..9e486855bb 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -275,7 +275,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { } @Override - public void addDataSourceObjectId(int rowId, long 
dataSourceObjectId) throws EamDbException{ + public void addDataSourceObjectId(int rowId, long dataSourceObjectId) throws EamDbException { try { acquireExclusiveLock(); super.addDataSourceObjectId(rowId, dataSourceObjectId); @@ -433,14 +433,14 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseSharedLock(); } } - + /** * Changes the name of a data source in the DB - * - * @param eamDataSource The data source - * @param newName The new name - * - * @throws EamDbException + * + * @param eamDataSource The data source + * @param newName The new name + * + * @throws EamDbException */ @Override public void updateDataSourceName(CorrelationDataSource eamDataSource, String newName) throws EamDbException { @@ -451,7 +451,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + /** * Updates the MD5 hash value in an existing data source in the database. * @@ -466,7 +466,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + /** * Updates the SHA-1 hash value in an existing data source in the database. * @@ -481,9 +481,10 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + /** - * Updates the SHA-256 hash value in an existing data source in the database. + * Updates the SHA-256 hash value in an existing data source in the + * database. 
* * @param eamDataSource The data source to update */ @@ -512,7 +513,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + @Override public List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { try { @@ -522,7 +523,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseSharedLock(); } } - + @Override public List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws EamDbException, CorrelationAttributeNormalizationException { try { @@ -542,7 +543,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseSharedLock(); } } - + /** * Retrieves eamArtifact instances from the database that are associated * with the aType and filePath diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java index 38da9bec48..c1d2dcb481 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCountSearchResults.java @@ -54,7 +54,7 @@ final public class CommonAttributeCountSearchResults { * common, value of 0 is disabled * @param resultType The type of Correlation Attribute being * searched for - + * */ CommonAttributeCountSearchResults(Map metadata, int percentageThreshold, CorrelationAttributeInstance.Type resultType) { //wrap in a new object in case any client code has used an unmodifiable collection diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java index 134d750ace..20ad09c797 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java +++ 
b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeValueList.java @@ -70,8 +70,8 @@ final public class CommonAttributeValueList { } /** - * Get the delayed set of value nodes. Only use for determining which values and how many - * CommonAttributeValues actually exist in the list. + * Get the delayed set of value nodes. Only use for determining which values + * and how many CommonAttributeValues actually exist in the list. * * @return metadataList the set of nodes */ diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java index fe6b507f7d..8451ec140d 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InstanceDataSourceNode.java @@ -110,8 +110,7 @@ public final class InstanceDataSourceNode extends DisplayableItemNode { } /** - * ChildFactory which builds DisplayableItem from the metadata data - * sources. + * ChildFactory which builds DisplayableItem from the metadata data sources. 
*/ static class FileInstanceNodeFactory extends ChildFactory { diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 941e95bf6b..3f2187aa5b 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -141,7 +141,7 @@ final class InterCaseSearchResultsProcessor { } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); - } + } return new HashMap<>(); } @@ -172,7 +172,7 @@ final class InterCaseSearchResultsProcessor { } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); - } + } return new HashMap<>(); } @@ -238,7 +238,7 @@ final class InterCaseSearchResultsProcessor { return instancetableCallback.getInstanceCollatedCommonFiles(); } catch (EamDbException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error accessing EamDb processing CaseInstancesTable.", ex); - } + } return new HashMap<>(); } @@ -307,7 +307,7 @@ final class InterCaseSearchResultsProcessor { } } catch (SQLException | EamDbException | CorrelationAttributeNormalizationException ex) { LOGGER.log(Level.WARNING, "Error getting artifact instances from database.", ex); // NON-NLS - } + } } Map getInstanceCollatedCommonFiles() { @@ -316,8 +316,8 @@ final class InterCaseSearchResultsProcessor { } /** - * Callback to use with findInterCaseValuesByCase which generates a map of maps of - * values for common property search + * Callback to use with findInterCaseValuesByCase which generates a map of + * maps of values for common property search */ private class InterCaseByCaseCallback implements CaseDbAccessManager.CaseDbAccessQueryCallback, InstanceTableCallback { @@ 
-385,7 +385,7 @@ final class InterCaseSearchResultsProcessor { } } catch (EamDbException | SQLException | CorrelationAttributeNormalizationException ex) { LOGGER.log(Level.WARNING, "Error getting artifact instances from database.", ex); // NON-NLS - } + } } Map> getInstanceCollatedCommonFiles() { From 0b2ccb260881179909bc9508e6545056999248e2 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 26 Feb 2019 17:50:16 -0500 Subject: [PATCH 39/80] 4757 fix pottential NPE when no results are present --- .../CommonAttributeCaseSearchResults.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java index 7b927e4e12..300006daf6 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java @@ -119,8 +119,8 @@ final public class CommonAttributeCaseSearchResults { throw new EamDbException("Unable to get current case while performing filtering", ex); } Map currentCaseDataSourceMap = metadata.get(currentCaseName); - if (currentCaseDataSourceMap == null) { - return null; + if (currentCaseDataSourceMap == null) { //there are no results + return new HashMap<>(); } CorrelationAttributeInstance.Type attributeType = CorrelationAttributeInstance .getDefaultCorrelationTypes() From e26ce3139bee97b5ef8dbc57671e66faa5f9416c Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 26 Feb 2019 17:55:55 -0500 Subject: [PATCH 40/80] 4757 add non-nls comment to string wither query code --- .../InterCaseSearchResultsProcessor.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java 
b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java index 3f2187aa5b..52f2cec276 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/InterCaseSearchResultsProcessor.java @@ -106,9 +106,9 @@ final class InterCaseSearchResultsProcessor { */ private String getFileQuery(Set mimeTypesToFilterOn) throws EamDbException { String query; - query = "md5 as value from tsk_files where known!=" + TskData.FileKnown.KNOWN.getFileKnownValue() + " AND md5 IS NOT NULL"; + query = "md5 AS value FROM tsk_files WHERE known!=" + TskData.FileKnown.KNOWN.getFileKnownValue() + " AND md5 IS NOT NULL"; //NON-NLS if (!mimeTypesToFilterOn.isEmpty()) { - query = query + " AND mime_type IS NOT NULL AND mime_type IN ('" + String.join("', '", mimeTypesToFilterOn) + "')"; + query = query + " AND mime_type IS NOT NULL AND mime_type IN ('" + String.join("', '", mimeTypesToFilterOn) + "')"; //NON-NLS } return query; } From e65c135975399f274abcb21389fcfc889442e0dc Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 26 Feb 2019 18:17:43 -0500 Subject: [PATCH 41/80] 4757 fix display of results when everything has been filtered out --- .../CommonAttributeCaseSearchResults.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java index 300006daf6..3ff7d40070 100644 --- a/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java +++ b/Core/src/org/sleuthkit/autopsy/commonpropertiessearch/CommonAttributeCaseSearchResults.java @@ -119,8 +119,9 @@ final public class CommonAttributeCaseSearchResults { throw new EamDbException("Unable to get current case while performing filtering", ex); } Map currentCaseDataSourceMap = 
metadata.get(currentCaseName); + Map> filteredCaseNameToDataSourcesTree = new HashMap<>(); if (currentCaseDataSourceMap == null) { //there are no results - return new HashMap<>(); + return filteredCaseNameToDataSourcesTree; } CorrelationAttributeInstance.Type attributeType = CorrelationAttributeInstance .getDefaultCorrelationTypes() @@ -129,13 +130,14 @@ final public class CommonAttributeCaseSearchResults { .findFirst().get(); //Call countUniqueDataSources once to reduce the number of DB queries needed to get the frequencyPercentage Double uniqueCaseDataSourceTuples = EamDb.getInstance().getCountUniqueDataSources().doubleValue(); - Map> filteredCaseNameToDataSourcesTree = new HashMap<>(); Map valuesToKeepCurrentCase = getValuesToKeepFromCurrentCase(currentCaseDataSourceMap, attributeType, percentageThreshold, uniqueCaseDataSourceTuples); for (Entry> mapOfDataSources : Collections.unmodifiableMap(metadata).entrySet()) { if (!mapOfDataSources.getKey().equals(currentCaseName)) { //rebuild the metadata structure with items from the current case substituted for their matches in other cases results we want to filter out removed Map newTreeForCase = createTreeForCase(valuesToKeepCurrentCase, mapOfDataSources.getValue()); - filteredCaseNameToDataSourcesTree.put(mapOfDataSources.getKey(), newTreeForCase); + if (!newTreeForCase.isEmpty()) { + filteredCaseNameToDataSourcesTree.put(mapOfDataSources.getKey(), newTreeForCase); + } } } return filteredCaseNameToDataSourcesTree; From 0dd1a8a521eef984ca73e43e83ba9defe4b751c6 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Wed, 27 Feb 2019 13:43:34 -0500 Subject: [PATCH 42/80] 4626 make constants static, update copyright date --- .../datamodel/PostgresEamDbSettings.java | 28 +++++++++---------- .../datamodel/SqliteEamDbSettings.java | 28 +++++++++---------- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java 
b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java index c72cc5f58f..71f23f4317 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2015-2017 Basis Technology Corp. + * Copyright 2015-2019 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -43,16 +43,16 @@ import static org.sleuthkit.autopsy.centralrepository.datamodel.AbstractSqlEamDb public final class PostgresEamDbSettings { private final static Logger LOGGER = Logger.getLogger(PostgresEamDbSettings.class.getName()); - private final String DEFAULT_HOST = ""; // NON-NLS - private final int DEFAULT_PORT = 5432; - private final String DEFAULT_DBNAME = "central_repository"; // NON-NLS - private final String DEFAULT_USERNAME = ""; - private final String DEFAULT_PASSWORD = ""; - private final String VALIDATION_QUERY = "SELECT version()"; // NON-NLS - private final String JDBC_BASE_URI = "jdbc:postgresql://"; // NON-NLS - private final String JDBC_DRIVER = "org.postgresql.Driver"; // NON-NLS - private final String DB_NAMES_REGEX = "[a-z][a-z0-9_]*"; // only lower case - private final String DB_USER_NAMES_REGEX = "[a-zA-Z]\\w*"; + private final static String DEFAULT_HOST = ""; // NON-NLS + private final static int DEFAULT_PORT = 5432; + private final static String DEFAULT_DBNAME = "central_repository"; // NON-NLS + private final static String DEFAULT_USERNAME = ""; + private final static String DEFAULT_PASSWORD = ""; + private final static String VALIDATION_QUERY = "SELECT version()"; // NON-NLS + private final static String JDBC_BASE_URI = "jdbc:postgresql://"; // NON-NLS + private final static String JDBC_DRIVER = "org.postgresql.Driver"; // NON-NLS + private final static String DB_NAMES_REGEX = "[a-z][a-z0-9_]*"; 
// only lower case + private final static String DB_USER_NAMES_REGEX = "[a-zA-Z]\\w*"; private String host; private int port; private String dbName; @@ -425,7 +425,7 @@ public final class PostgresEamDbSettings { stmt.execute(createDataSourcesTable.toString()); stmt.execute(dataSourceIdx1); stmt.execute(dataSourceIdx2); - + stmt.execute(createReferenceSetsTable.toString()); stmt.execute(referenceSetsIdx1); @@ -561,8 +561,8 @@ public final class PostgresEamDbSettings { * instance table. %s will exist in the template where the name of the new * table will be addedd. * - * @return a String which is a template for adding an index to the file_obj_id - * column of a _instances table + * @return a String which is a template for adding an index to the + * file_obj_id column of a _instances table */ static String getAddObjectIdIndexTemplate() { // Each "%s" will be replaced with the relevant TYPE_instances table name. diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java index 4baa423a04..784c93df33 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2015-2017 Basis Technology Corp. + * Copyright 2015-2019 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -43,19 +43,19 @@ import static org.sleuthkit.autopsy.centralrepository.datamodel.AbstractSqlEamDb public final class SqliteEamDbSettings { private final static Logger LOGGER = Logger.getLogger(SqliteEamDbSettings.class.getName()); - private final String DEFAULT_DBNAME = "central_repository.db"; // NON-NLS - private final String DEFAULT_DBDIRECTORY = PlatformUtil.getUserDirectory() + File.separator + "central_repository"; // NON-NLS - private final String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS - private final String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS - private final String VALIDATION_QUERY = "SELECT count(*) from sqlite_master"; // NON-NLS - private static final String PRAGMA_SYNC_OFF = "PRAGMA synchronous = OFF"; - private static final String PRAGMA_SYNC_NORMAL = "PRAGMA synchronous = NORMAL"; - private static final String PRAGMA_JOURNAL_WAL = "PRAGMA journal_mode = WAL"; - private static final String PRAGMA_READ_UNCOMMITTED_TRUE = "PRAGMA read_uncommitted = True"; - private static final String PRAGMA_ENCODING_UTF8 = "PRAGMA encoding = 'UTF-8'"; - private static final String PRAGMA_PAGE_SIZE_4096 = "PRAGMA page_size = 4096"; - private static final String PRAGMA_FOREIGN_KEYS_ON = "PRAGMA foreign_keys = ON"; - private final String DB_NAMES_REGEX = "[a-z][a-z0-9_]*(\\.db)?"; + private final static String DEFAULT_DBNAME = "central_repository.db"; // NON-NLS + private final static String DEFAULT_DBDIRECTORY = PlatformUtil.getUserDirectory() + File.separator + "central_repository"; // NON-NLS + private final static String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS + private final static String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS + private final static String VALIDATION_QUERY = "SELECT count(*) from sqlite_master"; // NON-NLS + private final static String PRAGMA_SYNC_OFF = "PRAGMA synchronous = OFF"; + private final static String PRAGMA_SYNC_NORMAL 
= "PRAGMA synchronous = NORMAL"; + private final static String PRAGMA_JOURNAL_WAL = "PRAGMA journal_mode = WAL"; + private final static String PRAGMA_READ_UNCOMMITTED_TRUE = "PRAGMA read_uncommitted = True"; + private final static String PRAGMA_ENCODING_UTF8 = "PRAGMA encoding = 'UTF-8'"; + private final static String PRAGMA_PAGE_SIZE_4096 = "PRAGMA page_size = 4096"; + private final static String PRAGMA_FOREIGN_KEYS_ON = "PRAGMA foreign_keys = ON"; + private final static String DB_NAMES_REGEX = "[a-z][a-z0-9_]*(\\.db)?"; private String dbName; private String dbDirectory; private int bulkThreshold; From d800c28c1b328f9fc0589c020832c430e7d822cb Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Wed, 27 Feb 2019 13:46:32 -0500 Subject: [PATCH 43/80] 4626 change unique constraint on cr data sources table for newly created CRs --- .../centralrepository/datamodel/PostgresEamDbSettings.java | 2 +- .../centralrepository/datamodel/SqliteEamDbSettings.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java index 71f23f4317..b7c84979b0 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java @@ -350,7 +350,7 @@ public final class PostgresEamDbSettings { createDataSourcesTable.append("sha1 text DEFAULT NULL,"); createDataSourcesTable.append("sha256 text DEFAULT NULL,"); createDataSourcesTable.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createDataSourcesTable.append("CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name)"); + createDataSourcesTable.append("CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id)"); createDataSourcesTable.append(")"); String dataSourceIdx1 
= "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)"; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java index 784c93df33..53e9966104 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java @@ -293,7 +293,7 @@ public final class SqliteEamDbSettings { createDataSourcesTable.append("sha1 text DEFAULT NULL,"); createDataSourcesTable.append("sha256 text DEFAULT NULL,"); createDataSourcesTable.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createDataSourcesTable.append("CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name)"); + createDataSourcesTable.append("CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id)"); createDataSourcesTable.append(")"); String dataSourceIdx1 = "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)"; From be304fc01e313675cceb72c5c876c106204e661a Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Wed, 27 Feb 2019 14:05:30 -0500 Subject: [PATCH 44/80] Removed invalid line. 
--- Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java | 1 - 1 file changed, 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java index 2aa1e41ffd..657c2aad38 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java @@ -109,7 +109,6 @@ final class HtmlViewer extends javax.swing.JPanel implements FileTypeViewer { @Override public void setFile(AbstractFile file) { WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - abstractFile = file; htmlPanel.setHtmlText(getHtmlText(file)); WindowManager.getDefault().getMainWindow().setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } From ed0bf660dbef1f54bfadf62dee1e1a65a954d28b Mon Sep 17 00:00:00 2001 From: Raman Date: Wed, 27 Feb 2019 15:18:08 -0500 Subject: [PATCH 45/80] Few changes to reporting Web Cache artifacts: - Do not add derived files for the HTTP Header data segment - Add HTTP headers as an attribute --- .../recentactivity/ChromeCacheExtractor.java | 91 +++++++++++++++---- 1 file changed, 74 insertions(+), 17 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 17e17a8656..976f83141f 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -293,6 +293,8 @@ final class ChromeCacheExtractor { logger.log(Level.INFO, "{0}- Now reading Cache index file from path {1}", new Object[]{moduleName, cachePath }); //NON-NLS List derivedFiles = new ArrayList<>(); + Collection sourceArtifacts = new ArrayList<>(); + Collection webCacheArtifacts = new ArrayList<>(); ByteBuffer indexFileROBuffer = 
indexFile.get().getByteBuffer(); IndexFileHeader indexHdr = new IndexFileHeader(indexFileROBuffer); @@ -306,11 +308,11 @@ final class ChromeCacheExtractor { if (addr.isInitialized()) { try { - List addedFiles = this.getCacheEntry(addr); + List addedFiles = this.getCacheEntry(addr, sourceArtifacts, webCacheArtifacts); derivedFiles.addAll(addedFiles); } catch (TskCoreException | IngestModuleException ex) { - logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr)); //NON-NLS + logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", addr), ex); //NON-NLS } } } @@ -321,8 +323,8 @@ final class ChromeCacheExtractor { context.addFilesToJob(derivedFiles); - services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE)); - services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE)); + services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE, !sourceArtifacts.isEmpty() ? sourceArtifacts : null)); + services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE, !webCacheArtifacts.isEmpty() ? 
webCacheArtifacts : null)); cleanup(); } @@ -333,10 +335,12 @@ final class ChromeCacheExtractor { * Extracts the files if needed and adds as derived files, creates artifacts * * @param cacheEntryAddress cache entry address + * @param sourceArtifacts any source artifacts created are added to this collection + * @param webCacheArtifacts any web cache artifacts created are added to this collection * * @return Optional derived file, is a derived file is added for the given entry */ - List getCacheEntry(CacheAddress cacheEntryAddress) throws TskCoreException, IngestModuleException { + List getCacheEntry(CacheAddress cacheEntryAddress, Collection sourceArtifacts, Collection webCacheArtifacts ) throws TskCoreException, IngestModuleException { List derivedFiles = new ArrayList<>(); @@ -361,7 +365,14 @@ final class ChromeCacheExtractor { moduleName, cacheEntry.getCreationTime()); - for (int j = 0; j < dataEntries.size(); j++) { + BlackboardAttribute hhtpHeaderAttr = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HEADERS, + moduleName, + cacheEntry.getHTTPHeaders()); + + + // Only process the first payload data segment in each entry + // first data segement has the HTTP headers, 2nd is the payload + for (int j = 1; j < dataEntries.size() && j < 2; j++) { CacheData data = dataEntries.get(j); String dataFilename = data.getAddress().getFilename(); Optional dataFile = this.findCacheFile(dataFilename, cachePath); @@ -378,8 +389,7 @@ final class ChromeCacheExtractor { Collection webCacheAttributes = new ArrayList<>(); webCacheAttributes.add(urlAttr); webCacheAttributes.add(createTimeAttr); - - // RAMAN TBD: how to associate the entry data_n file as the source of this artifact?? 
+ webCacheAttributes.add(hhtpHeaderAttr); if (dataFile.isPresent()) { if (data.isInExternalFile() ) { @@ -387,11 +397,13 @@ final class ChromeCacheExtractor { BlackboardArtifact sourceArtifact = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE); if (sourceArtifact != null) { sourceArtifact.addAttributes(sourceArtifactAttributes); + sourceArtifacts.add(sourceArtifact); } BlackboardArtifact webCacheArtifact = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); - if (sourceArtifact != null) { + if (webCacheArtifact != null) { webCacheArtifact.addAttributes(webCacheAttributes); + webCacheArtifacts.add(webCacheArtifact); } if (isBrotliCompressed) { @@ -407,8 +419,8 @@ final class ChromeCacheExtractor { String filename = data.save(); String relPathname = getRelOutputFolderName() + data.getAddress().getCachePath() + filename; - - DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, + try { + DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, data.getDataLength(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), // TBD true, @@ -419,16 +431,27 @@ final class ChromeCacheExtractor { "", TskData.EncodingType.NONE); - - try { BlackboardArtifact sourceArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE); if (sourceArtifact != null) { sourceArtifact.addAttributes(sourceArtifactAttributes); + sourceArtifacts.add(sourceArtifact); } - - BlackboardArtifact webCacheArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); - if (sourceArtifact != null) { + + BlackboardArtifact webCacheArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); + if (webCacheArtifact != null) { webCacheArtifact.addAttributes(webCacheAttributes); + + webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, + moduleName, + dataFile.get().getUniquePath())); //NON-NLS + + long pathID = Util.findID(dataSource, 
dataFile.get().getUniquePath()); //NON-NLS + if (pathID != -1) { + webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, + moduleName, pathID)); + } + + webCacheArtifacts.add(webCacheArtifact); } if (isBrotliCompressed) { @@ -436,7 +459,6 @@ final class ChromeCacheExtractor { derivedFile.save(); } - derivedFiles.add(derivedFile); } catch (TskException ex) { logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS @@ -834,6 +856,28 @@ final class ChromeCacheExtractor { return this.httpHeaders.get(key); } + /** + * Returns all HTTP headers as a single '\n' separated string + * + * @return + */ + String getHTTPHeaders() { + if (!hasHTTPHeaders()) { + return ""; + } + + StringBuilder sb = new StringBuilder(); + httpHeaders.entrySet().forEach((entry) -> { + if (sb.length() > 0) { + sb.append(" \n"); + } + sb.append(String.format("%s : %s", + entry.getKey(), entry.getValue())); + }); + + return sb.toString(); + } + String getHTTPRespone() { return httpResponse; } @@ -1245,6 +1289,19 @@ final class ChromeCacheExtractor { return dataList.get(0).getHTTPHeader(key); } + /** + * Returns the all the HTTP headers as a single string + * + * @return header value, null if not found + */ + String getHTTPHeaders() { + if ((dataList == null) || dataList.isEmpty()) { + return null; + } + // First data segment has the HTTP headers, if any + return dataList.get(0).getHTTPHeaders(); + } + /** * Returns if the entry is compressed with Brotli * From deb9670dd89dc19e138411e6b2bc5a6cad4f2475 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Wed, 27 Feb 2019 15:25:56 -0500 Subject: [PATCH 46/80] 4626 add upgrade code for Central repository data_sources unique constraint change --- .../datamodel/AbstractSqlEamDb.java | 31 +++++-- .../datamodel/PostgresEamDbSettings.java | 82 +++++++++++-------- .../datamodel/SqliteEamDbSettings.java | 81 ++++++++++-------- 3 files changed, 119 insertions(+), 75 deletions(-) diff --git 
a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index bfcbcb6cb5..0e4dfcc624 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -61,7 +61,7 @@ abstract class AbstractSqlEamDb implements EamDb { static final String SCHEMA_MINOR_VERSION_KEY = "SCHEMA_MINOR_VERSION"; static final String CREATION_SCHEMA_MAJOR_VERSION_KEY = "CREATION_SCHEMA_MAJOR_VERSION"; static final String CREATION_SCHEMA_MINOR_VERSION_KEY = "CREATION_SCHEMA_MINOR_VERSION"; - static final CaseDbSchemaVersionNumber SOFTWARE_CR_DB_SCHEMA_VERSION = new CaseDbSchemaVersionNumber(1, 2); + static final CaseDbSchemaVersionNumber SOFTWARE_CR_DB_SCHEMA_VERSION = new CaseDbSchemaVersionNumber(1, 3); protected final List defaultCorrelationTypes; @@ -625,7 +625,7 @@ abstract class AbstractSqlEamDb implements EamDb { // This data source is already in the central repo return eamDataSource; } - + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -650,7 +650,7 @@ abstract class AbstractSqlEamDb implements EamDb { /* * If nothing was inserted, then return the data source that * exists in the Central Repository. - * + * * This is expected to occur with PostgreSQL Central Repository * databases. */ @@ -675,7 +675,7 @@ abstract class AbstractSqlEamDb implements EamDb { * If an exception was thrown causing us to not return a new data * source, attempt to get an existing data source with the same case * ID and data source object ID. - * + * * This exception block is expected to occur with SQLite Central * Repository databases. 
*/ @@ -3582,7 +3582,28 @@ abstract class AbstractSqlEamDb implements EamDb { statement.execute("INSERT INTO db_info (name, value) VALUES ('" + AbstractSqlEamDb.CREATION_SCHEMA_MAJOR_VERSION_KEY + "','" + creationMajorVer + "')"); statement.execute("INSERT INTO db_info (name, value) VALUES ('" + AbstractSqlEamDb.CREATION_SCHEMA_MINOR_VERSION_KEY + "','" + creationMinorVer + "')"); } - + /* + * Update to 1.3 + */ + if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 3)) < 0) { + switch (selectedPlatform) { + case POSTGRESQL: + statement.execute("ALTER TABLE data_sources DROP CONSTRAINT datasource_unique"); + statement.execute("ALTER TABLE data_sources ADD CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id)"); + + break; + case SQLITE: + statement.execute("ALTER TABLE data_sources RENAME TO old_data_sources"); + statement.execute(SqliteEamDbSettings.getCreateDataSourcesTableStatement()); + statement.execute(SqliteEamDbSettings.getAddDataSourcesNameIndexStatement()); + statement.execute(SqliteEamDbSettings.getAddDataSourcesObjectIdIndexStatement()); + statement.execute("INSERT INTO data_sources SELECT * FROM old_data_sources"); + statement.execute("DROP TABLE old_data_sources"); + break; + default: + throw new EamDbException("Currently selected database platform \"" + selectedPlatform.name() + "\" can not be upgraded."); + } + } updateSchemaVersion(conn); conn.commit(); logger.log(Level.INFO, String.format("Central Repository schema updated to version %s", SOFTWARE_CR_DB_SCHEMA_VERSION)); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java index b7c84979b0..5e6f7e53ed 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java @@ -339,23 +339,6 @@ public final class 
PostgresEamDbSettings { String casesIdx1 = "CREATE INDEX IF NOT EXISTS cases_org_id ON cases (org_id)"; String casesIdx2 = "CREATE INDEX IF NOT EXISTS cases_case_uid ON cases (case_uid)"; - StringBuilder createDataSourcesTable = new StringBuilder(); - createDataSourcesTable.append("CREATE TABLE IF NOT EXISTS data_sources ("); - createDataSourcesTable.append("id SERIAL PRIMARY KEY,"); - createDataSourcesTable.append("case_id integer NOT NULL,"); - createDataSourcesTable.append("device_id text NOT NULL,"); - createDataSourcesTable.append("name text NOT NULL,"); - createDataSourcesTable.append("datasource_obj_id BIGINT,"); - createDataSourcesTable.append("md5 text DEFAULT NULL,"); - createDataSourcesTable.append("sha1 text DEFAULT NULL,"); - createDataSourcesTable.append("sha256 text DEFAULT NULL,"); - createDataSourcesTable.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createDataSourcesTable.append("CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id)"); - createDataSourcesTable.append(")"); - - String dataSourceIdx1 = "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)"; - String dataSourceIdx2 = "CREATE INDEX IF NOT EXISTS data_sources_object_id ON data_sources (datasource_obj_id)"; - StringBuilder createReferenceSetsTable = new StringBuilder(); createReferenceSetsTable.append("CREATE TABLE IF NOT EXISTS reference_sets ("); createReferenceSetsTable.append("id SERIAL PRIMARY KEY,"); @@ -422,9 +405,9 @@ public final class PostgresEamDbSettings { stmt.execute(casesIdx1); stmt.execute(casesIdx2); - stmt.execute(createDataSourcesTable.toString()); - stmt.execute(dataSourceIdx1); - stmt.execute(dataSourceIdx2); + stmt.execute(getCreateDataSourcesTableStatement()); + stmt.execute(getAddDataSourcesNameIndexStatement()); + stmt.execute(getAddDataSourcesObjectIdIndexStatement()); stmt.execute(createReferenceSetsTable.toString()); stmt.execute(referenceSetsIdx1); @@ -487,21 +470,50 
@@ public final class PostgresEamDbSettings { */ static String getCreateArtifactInstancesTableTemplate() { // Each "%s" will be replaced with the relevant TYPE_instances table name. - StringBuilder createArtifactInstancesTableTemplate = new StringBuilder(); - createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s ("); - createArtifactInstancesTableTemplate.append("id SERIAL PRIMARY KEY,"); - createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("value text NOT NULL,"); - createArtifactInstancesTableTemplate.append("file_path text NOT NULL,"); - createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("comment text,"); - createArtifactInstancesTableTemplate.append("file_obj_id BIGINT,"); - createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),"); - createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL"); - createArtifactInstancesTableTemplate.append(")"); - return createArtifactInstancesTableTemplate.toString(); + return ("CREATE TABLE IF NOT EXISTS %s (id SERIAL PRIMARY KEY,case_id integer NOT NULL," + + "data_source_id integer NOT NULL,value text NOT NULL,file_path text NOT NULL," + + "known_status integer NOT NULL,comment text,file_obj_id BIGINT," + + "CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path)," + + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," + + "foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL)"); + } + + /** + * Get the statement String for 
creating a new data_sources table in a + * Postgres central repository. + * + * @return a String which is a statement for creating a new data_sources + * table + */ + static String getCreateDataSourcesTableStatement() { + return "CREATE TABLE IF NOT EXISTS data_sources " + + "(id SERIAL PRIMARY KEY,case_id integer NOT NULL,device_id text NOT NULL," + + "name text NOT NULL,datasource_obj_id BIGINT,md5 text DEFAULT NULL," + + "sha1 text DEFAULT NULL,sha256 text DEFAULT NULL," + + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," + + "CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id))"; + } + + /** + * Get the statement for creating an index on the name column of the + * data_sources table. + * + * @return a String which is a statement for adding an index on the name + * column of the data_sources table. + */ + static String getAddDataSourcesNameIndexStatement() { + return "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)"; + } + + /** + * Get the statement for creating an index on the data_sources_object_id + * column of the data_sources table. + * + * @return a String which is a statement for adding an index on the + * data_sources_object_id column of the data_sources table. 
+ */ + static String getAddDataSourcesObjectIdIndexStatement() { + return "CREATE INDEX IF NOT EXISTS data_sources_object_id ON data_sources (datasource_obj_id)"; } /** diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java index 53e9966104..ae8e419e99 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java @@ -282,23 +282,6 @@ public final class SqliteEamDbSettings { String casesIdx1 = "CREATE INDEX IF NOT EXISTS cases_org_id ON cases (org_id)"; String casesIdx2 = "CREATE INDEX IF NOT EXISTS cases_case_uid ON cases (case_uid)"; - StringBuilder createDataSourcesTable = new StringBuilder(); - createDataSourcesTable.append("CREATE TABLE IF NOT EXISTS data_sources ("); - createDataSourcesTable.append("id integer primary key autoincrement NOT NULL,"); - createDataSourcesTable.append("case_id integer NOT NULL,"); - createDataSourcesTable.append("device_id text NOT NULL,"); - createDataSourcesTable.append("name text NOT NULL,"); - createDataSourcesTable.append("datasource_obj_id integer,"); - createDataSourcesTable.append("md5 text DEFAULT NULL,"); - createDataSourcesTable.append("sha1 text DEFAULT NULL,"); - createDataSourcesTable.append("sha256 text DEFAULT NULL,"); - createDataSourcesTable.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createDataSourcesTable.append("CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id)"); - createDataSourcesTable.append(")"); - - String dataSourceIdx1 = "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)"; - String dataSourceIdx2 = "CREATE INDEX IF NOT EXISTS data_sources_object_id ON data_sources (datasource_obj_id)"; - StringBuilder createReferenceSetsTable = new StringBuilder(); 
createReferenceSetsTable.append("CREATE TABLE IF NOT EXISTS reference_sets ("); createReferenceSetsTable.append("id integer primary key autoincrement NOT NULL,"); @@ -371,9 +354,9 @@ public final class SqliteEamDbSettings { stmt.execute(casesIdx1); stmt.execute(casesIdx2); - stmt.execute(createDataSourcesTable.toString()); - stmt.execute(dataSourceIdx1); - stmt.execute(dataSourceIdx2); + stmt.execute(getCreateDataSourcesTableStatement()); + stmt.execute(getAddDataSourcesNameIndexStatement()); + stmt.execute(getAddDataSourcesObjectIdIndexStatement()); stmt.execute(createReferenceSetsTable.toString()); stmt.execute(referenceSetsIdx1); @@ -435,21 +418,49 @@ public final class SqliteEamDbSettings { */ static String getCreateArtifactInstancesTableTemplate() { // Each "%s" will be replaced with the relevant TYPE_instances table name. - StringBuilder createArtifactInstancesTableTemplate = new StringBuilder(); - createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s ("); - createArtifactInstancesTableTemplate.append("id integer primary key autoincrement NOT NULL,"); - createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("value text NOT NULL,"); - createArtifactInstancesTableTemplate.append("file_path text NOT NULL,"); - createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,"); - createArtifactInstancesTableTemplate.append("comment text,"); - createArtifactInstancesTableTemplate.append("file_obj_id integer,"); - createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,"); - createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"); - createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE 
SET NULL ON DELETE SET NULL"); - createArtifactInstancesTableTemplate.append(")"); - return createArtifactInstancesTableTemplate.toString(); + return "CREATE TABLE IF NOT EXISTS %s (id integer primary key autoincrement NOT NULL," + + "case_id integer NOT NULL,data_source_id integer NOT NULL,value text NOT NULL," + + "file_path text NOT NULL,known_status integer NOT NULL,comment text,file_obj_id integer," + + "CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE," + + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," + + "foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL)"; + } + + /** + * Get the statement String for creating a new data_sources table in a + * Sqlite central repository. + * + * @return a String which is a statement for creating a new data_sources + * table + */ + static String getCreateDataSourcesTableStatement() { + return "CREATE TABLE IF NOT EXISTS data_sources (id integer primary key autoincrement NOT NULL," + + "case_id integer NOT NULL,device_id text NOT NULL,name text NOT NULL,datasource_obj_id integer," + + "md5 text DEFAULT NULL,sha1 text DEFAULT NULL,sha256 text DEFAULT NULL," + + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," + + "CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id))"; + } + + /** + * Get the statement for creating an index on the name column of the + * data_sources table. + * + * @return a String which is a statement for adding an index on the name + * column of the data_sources table. + */ + static String getAddDataSourcesNameIndexStatement() { + return "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)"; + } + + /** + * Get the statement for creating an index on the data_sources_object_id + * column of the data_sources table. 
+ * + * @return a String which is a statement for adding an index on the + * data_sources_object_id column of the data_sources table. + */ + static String getAddDataSourcesObjectIdIndexStatement() { + return "CREATE INDEX IF NOT EXISTS data_sources_object_id ON data_sources (datasource_obj_id)"; } /** From d55d3837acb9b75dd3f9914eae714694621b13ea Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Wed, 27 Feb 2019 17:11:05 -0500 Subject: [PATCH 47/80] Added support for reading the Cookies.binarycookies file 1199-safari-cookies --- .../recentactivity/BinaryCookieReader.java | 429 ++++++++++++++++++ .../autopsy/recentactivity/ExtractSafari.java | 162 +++++-- 2 files changed, 561 insertions(+), 30 deletions(-) create mode 100755 RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java new file mode 100755 index 0000000000..04e0893e1b --- /dev/null +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java @@ -0,0 +1,429 @@ +/* + * + * Autopsy Forensic Browser + * + * Copyright 2019 Basis Technology Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.recentactivity; + +import java.io.DataInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.recentactivity.BinaryCookieReader.Cookie; + +/** + * SafariCookieReader wraps all the knowledge of how to read the mac + * .binarycookie files into one class. + * + * The binarycookie file has a header which describes how many pages of cookies + * and where they are located. Each cookie page has a header and a list of + * cookies. + * + */ +public final class BinaryCookieReader implements Iterable { + + private static final int MAGIC_SIZE = 4; + private static final int SIZEOF_INT_BYTES = 4; + private static final int PAGE_HEADER_VALUE = 256; + + private static final String COOKIE_MAGIC = "cook"; //NON-NLS + + private static final int MAC_EPOC_FIX = 978307200; + + private final int[] pageSizeArray; + private final File cookieFile; + + private final Logger logger = Logger.getLogger(this.getClass().getName()); + + /** + * SafariCookieReader wraps all the knowledge of how to read the mac + * .binarycookie files into one class. + * + */ + private BinaryCookieReader(File file, int[] sizeArray) { + cookieFile = file; + pageSizeArray = sizeArray; + } + + /** + * initalizeReader opens the given file, reads the header and checks that + * the file is a binarycookie file. This function does not keep the file + * open. 
+ * + * @param file binarycookie file + * @return An instance of the reader + * @throws FileNotFoundException + * @throws IOException + */ + public static BinaryCookieReader initalizeReader(File file) throws FileNotFoundException, IOException { + BinaryCookieReader reader = null; + try (DataInputStream dataStream = new DataInputStream(new FileInputStream(file))) { + + byte[] magic = new byte[MAGIC_SIZE]; + if (dataStream.read(magic) != MAGIC_SIZE) { + throw new IOException("Failed to read header, invalid file size" + file.getName()); //NON-NLS + } + + if (!(new String(magic)).equals(COOKIE_MAGIC)) { + throw new IOException(file.getName() + " is not a cookie file"); //NON-NLS + } + + int[] sizeArray = null; + int pageCount = dataStream.readInt(); + if (pageCount != 0) { + sizeArray = new int[pageCount]; + + for (int cnt = 0; cnt < pageCount; cnt++) { + sizeArray[cnt] = dataStream.readInt(); + } + } + + reader = new BinaryCookieReader(file, sizeArray); + } + + return reader; + } + + /** + * Creates and returns a instance of CookiePageIterator + * + * @return CookiePageIterator + */ + @Override + public Iterator iterator() { + return new CookiePageIterator(); + } + + /** + * The cookiePageIterator iterates the binarycookie file by page. + */ + private class CookiePageIterator implements Iterator { + + int pageIndex = 0; + CookiePage currentPage = null; + Iterator currentIterator = null; + DataInputStream dataStream = null; + + /** + * The cookiePageIterator iterates the binarycookie file by page. 
+ */ + CookiePageIterator() { + try { + dataStream = new DataInputStream(new FileInputStream(cookieFile)); + // skip to the first page + dataStream.skipBytes((2 * SIZEOF_INT_BYTES) + (pageSizeArray.length * SIZEOF_INT_BYTES)); + } catch (IOException ex) { + logger.log(Level.WARNING, "Error occured creating DataInputStream", new Object[]{cookieFile.getName(), ex}); //NON-NLS + closeStream(); // Just in case the error was from skip + } + } + + /** + * hasNext manages reading of each cookie page and creating the Cookie + * Page objects. If the currentIterator returns false from hasNext, read + * the next page and create a new instance of + * CookiePage\CookieIterator + * + * @return True if there are more cookies + */ + @Override + public boolean hasNext() { + + if (dataStream == null) { + return false; + } + + if (currentIterator == null || !currentIterator.hasNext()) { + try { + + if (pageIndex < pageSizeArray.length) { + byte[] nextPage = new byte[pageSizeArray[pageIndex]]; + dataStream.read(nextPage); + + currentPage = new CookiePage(nextPage); + currentIterator = currentPage.iterator(); + } else { + closeStream(); + return false; + } + + pageIndex++; + } catch (IOException ex) { + closeStream(); + logger.log(Level.WARNING, "A read error occured for file {0} page {1} {2}", new Object[]{cookieFile.getName(), pageIndex, ex}); //NON-NLS + return false; + } + } + + return currentIterator.hasNext(); + } + + /** + * Get the next cookie from the current CookieIterator + * + * @return The next cookie + */ + @Override + public Cookie next() { + // Just in case someone uses next without hasNext, this check will + // make sure there are more elements and that we iterate properly + // through the pages. 
+ if (!hasNext()) { + throw new NoSuchElementException(); + } + return currentIterator.next(); + } + + /** + * Close the DataInputStream + */ + private void closeStream() { + if (dataStream != null) { + try { + dataStream.close(); + dataStream = null; + } catch (IOException ex) { + logger.log(Level.WARNING, "SafariCookieReader unable to close DataInputStream for file {0} {1}", new Object[]{cookieFile.getName(), ex}); //NON-NLS + } + } + } + } + + /** + * Wrapper class for an instance of a CookiePage in the binarycookie file + */ + private class CookiePage implements Iterable { + + int[] cookieOffSets; + ByteBuffer pageBuffer; + + /** + * Setup the CookiePage object. Validates that the page bytes are in the + * correct format by checking for the header value of 0x0100 + * + * @param page + * @throws IOException + */ + CookiePage(byte[] page) throws IOException { + if (page == null || page.length == 0) { + throw new IllegalArgumentException("Invalid value for page passes to CookiePage constructor"); //NON-NLS + } + + pageBuffer = ByteBuffer.wrap(page); + + if (pageBuffer.getInt() != PAGE_HEADER_VALUE) { + pageBuffer = null; + throw new IOException("Invalid file format, bad page head value found"); //NON-NLS + } + + pageBuffer.order(ByteOrder.LITTLE_ENDIAN); + int count = pageBuffer.getInt(); + cookieOffSets = new int[count]; + + for (int cnt = 0; cnt < count; cnt++) { + cookieOffSets[cnt] = pageBuffer.getInt(); + } + + pageBuffer.getInt(); // All 0, not needed + } + + /** + * Returns an instance of a CookieIterator + * + * @return CookieIterator + */ + @Override + public Iterator iterator() { + return new CookieIterator(); + } + + /** + * Implements Iterator to iterate over the cookies in the page + */ + private class CookieIterator implements Iterator { + + int index = 0; + + /** + * Checks to see if there are more cookies + * + * @return True if there are more cookies, false if there are not + */ + @Override + public boolean hasNext() { + if (pageBuffer == null) { 
+ return false; + } + + return index < cookieOffSets.length; + } + + /** + * Gets the next cookie from the page + * + * @return Next cookie + */ + @Override + public Cookie next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + + int offset = cookieOffSets[index]; + int size = pageBuffer.getInt(offset); + byte[] cookieBytes = new byte[size]; + pageBuffer.get(cookieBytes, 0, size); + index++; + + return new Cookie(cookieBytes); + } + } + } + + /** + * Represents an instance of a cookie from the binarycookie file + */ + public class Cookie { + + private final static int COOKIE_HEAD_SKIP = 16; + + private final Double expirationDate; + private final Double creationDate; + + private final String name; + private final String url; + private final String path; + private final String value; + + /** + * Creates a cookie object from the given array of bytes + * + * @param cookieBytes Byte array for the cookie + */ + protected Cookie(byte[] cookieBytes) { + if (cookieBytes == null || cookieBytes.length == 0) { + throw new IllegalArgumentException("Invalid value for cookieBytes passes to Cookie constructor"); //NON-NLS + } + + ByteBuffer byteBuffer = ByteBuffer.wrap(cookieBytes); + byteBuffer.order(ByteOrder.LITTLE_ENDIAN); + + // Skip past the four int values that we are not interested in + byteBuffer.position(byteBuffer.position() + COOKIE_HEAD_SKIP); + + int urlOffset = byteBuffer.getInt(); + int nameOffset = byteBuffer.getInt(); + int pathOffset = byteBuffer.getInt(); + int valueOffset = byteBuffer.getInt(); + byteBuffer.getLong(); // 8 bytes of not needed + + expirationDate = byteBuffer.getDouble(); + creationDate = byteBuffer.getDouble(); + + url = decodeString(cookieBytes, urlOffset); + name = decodeString(cookieBytes, nameOffset); + path = decodeString(cookieBytes, pathOffset); + value = decodeString(cookieBytes, valueOffset); + } + + /** + * Returns the expiration date of the cookie represented by this cookie + * object + * + * @return + */ + public 
final Long getExpirationDate() { + return expirationDate.longValue() + MAC_EPOC_FIX; + } + + /** + * Returns the creation date of the cookie represented by this cookie + * object + * + * @return + */ + public final Long getCreationDate() { + return creationDate.longValue() + MAC_EPOC_FIX; + } + + /** + * Returns the url of the cookie represented by this cookie object + * + * @return the cookie URL + */ + public final String getURL() { + return url; + } + + /** + * Returns the name of the cookie represented by this cookie object + * + * @return The cookie name + */ + public final String getName() { + return name; + } + + /** + * Returns the path of the cookie represented by this cookie object + * + * @return The cookie path + */ + public final String getPath() { + return path; + } + + /** + * Returns the value of the cookie represented by this cookie object + * + * @return The cookie value + */ + public final String getValue() { + return value; + } + + /** + * Give an array a bytes and an offset in the array this function will + * copy the bytes from offset to the first null terminator into a new + * array. 
The bytes in the new array will be returned as a string not + * including the null terminator + * + * @param byteArray + * @param offset + * @return + */ + private String decodeString(byte[] byteArray, int offset) { + byte[] stringBytes = new byte[byteArray.length - offset]; + for (int index = 0; index < stringBytes.length; index++) { + byte nibble = byteArray[offset + index]; + if (nibble != '\0') { + stringBytes[index] = nibble; + } else { + break; + } + } + + return new String(stringBytes); + } + } +} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 4c448c1ff2..4bbca9fd85 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -32,6 +32,7 @@ import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.logging.Level; import javax.xml.parsers.ParserConfigurationException; @@ -43,6 +44,7 @@ import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.recentactivity.BinaryCookieReader.Cookie; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; @@ -63,6 +65,7 @@ final class ExtractSafari extends Extract { private static final String HISTORY_FILE_NAME = "History.db"; //NON-NLS private static final String BOOKMARK_FILE_NAME = "Bookmarks.plist"; //NON-NLS private static final String DOWNLOAD_FILE_NAME = "Downloads.plist"; //NON-NLS + private static final String COOKIE_FILE_NAME = "Cookies.binarycookies"; //NON-NLS private static final String HEAD_URL = 
"url"; //NON-NLS private static final String HEAD_TITLE = "title"; //NON-NLS @@ -82,7 +85,9 @@ final class ExtractSafari extends Extract { @Messages({ "ExtractSafari_Module_Name=Safari", "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.", - "ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files",}) + "ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files", + "ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Bookmark files", + }) /** * Extract the bookmarks, cookies, downloads and history from Safari @@ -122,6 +127,13 @@ final class ExtractSafari extends Extract { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); logger.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS } + + try { + processBinaryCookieFile(dataSource, context); + } catch (IOException | TskCoreException ex) { + this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Cookies()); + logger.log(Level.SEVERE, "Exception thrown while processing Safarri cookies file: {0}", ex); //NON-NLS + } } /** @@ -213,6 +225,32 @@ final class ExtractSafari extends Extract { getDownloads(dataSource, context, file); } } + + /** + * Process the Safari Cookie file. 
+ * @param dataSource + * @param context + * @throws TskCoreException + * @throws IOException + */ + private void processBinaryCookieFile(Content dataSource, IngestJobContext context) throws TskCoreException, IOException { + FileManager fileManager = getCurrentCase().getServices().getFileManager(); + + List files = fileManager.findFiles(dataSource, COOKIE_FILE_NAME); + + if (files == null || files.isEmpty()) { + return; + } + + setFoundData(true); + + for (AbstractFile file : files) { + if (context.dataSourceIngestIsCancelled()) { + break; + } + getCookies(context, file); + } + } /** * Creates a temporary copy of historyFile and creates a list of @@ -312,6 +350,39 @@ final class ExtractSafari extends Extract { } } + + /** + * Creates a temporary copy of the Cookie file and creates a list of cookie + * BlackboardArtifacts + * + * @param context IngetstJobContext + * @param file Original Cookie file from the case + * @throws TskCoreException + * @throws IOException + */ + private void getCookies(IngestJobContext context, AbstractFile file) throws TskCoreException, IOException { + if (file.getSize() == 0) { + return; + } + + File tempFile = null; + + try { + tempFile = createTemporaryFile(context, file); + + Collection bbartifacts = getCookieArtifacts(file, tempFile); + + if (!bbartifacts.isEmpty()) { + services.fireModuleDataEvent(new ModuleDataEvent( + RecentActivityExtracterModuleFactory.getModuleName(), + BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE, bbartifacts)); + } + } finally { + if (tempFile != null) { + tempFile.delete(); + } + } + } /** * Queries the history db for the history information creating a list of @@ -445,6 +516,36 @@ final class ExtractSafari extends Extract { return bbartifacts; } + + /** + * Finds the cookies in the tempFile creating a list of BlackboardArtifacts + * each representing one cookie. 
+ * + * @param origFile Original Cookies.binarycookie file from case + * @param tempFile Temporary copy of the cookies file + * @return List of Blackboard Artifacts, one for each cookie + * @throws TskCoreException + * @throws IOException + */ + private Collection getCookieArtifacts(AbstractFile origFile, File tempFile) throws TskCoreException, IOException { + Collection bbartifacts = null; + BinaryCookieReader reader = BinaryCookieReader.initalizeReader(tempFile); + + if (reader != null) { + bbartifacts = new ArrayList<>(); + + Iterator iter = reader.iterator(); + while (iter.hasNext()) { + Cookie cookie = iter.next(); + + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE); + bbart.addAttributes(createCookieAttributes(cookie.getURL(), cookie.getCreationDate(), cookie.getName(), cookie.getValue(), this.getName(), NetworkUtils.extractDomain(cookie.getURL()))); + bbartifacts.add(bbart); + } + } + + return bbartifacts; + } /** * Parses the plist object to find the bookmark child objects, then creates @@ -491,37 +592,38 @@ final class ExtractSafari extends Extract { /** * Parse the NSDictionary object that represents one download. - * + * * @param origFile Download.plist file from the case - * @param entry One NSDictionary Object that represents one download instance + * @param entry One NSDictionary Object that represents one download + * instance * @return a Blackboard Artifact for the download. 
- * @throws TskCoreException + * @throws TskCoreException */ - private BlackboardArtifact parseDownloadDictionary(Content dataSource, AbstractFile origFile, NSDictionary entry)throws TskCoreException { - String url = null; - String path = null; - Long time = null; - Long pathID = null; - - NSString nsstring = (NSString)entry.get(PLIST_KEY_DOWNLOAD_URL); - if(nsstring != null){ - url = nsstring.toString(); - } - - nsstring = (NSString)entry.get(PLIST_KEY_DOWNLOAD_PATH); - if(nsstring != null){ - path = nsstring.toString(); - pathID = Util.findID(dataSource, path); - } - - NSDate date = (NSDate)entry.get(PLIST_KEY_DOWNLOAD_DATE); - if(date != null){ - time = date.getDate().getTime(); - } - - BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); - bbart.addAttributes(this.createDownloadAttributes(path, pathID, url, time, NetworkUtils.extractDomain(url), getName())); - - return bbart; + private BlackboardArtifact parseDownloadDictionary(Content dataSource, AbstractFile origFile, NSDictionary entry) throws TskCoreException { + String url = null; + String path = null; + Long time = null; + Long pathID = null; + + NSString nsstring = (NSString) entry.get(PLIST_KEY_DOWNLOAD_URL); + if (nsstring != null) { + url = nsstring.toString(); + } + + nsstring = (NSString) entry.get(PLIST_KEY_DOWNLOAD_PATH); + if (nsstring != null) { + path = nsstring.toString(); + pathID = Util.findID(dataSource, path); + } + + NSDate date = (NSDate) entry.get(PLIST_KEY_DOWNLOAD_DATE); + if (date != null) { + time = date.getDate().getTime(); + } + + BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); + bbart.addAttributes(this.createDownloadAttributes(path, pathID, url, time, NetworkUtils.extractDomain(url), getName())); + + return bbart; } } From 076c76c13156d3e0785279c6066a34d0a04e0998 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Thu, 28 Feb 2019 00:09:28 -0500 Subject: [PATCH 48/80] Case and 
data source counts added. --- .../DataContentViewerOtherCases.form | 48 ++++------- .../DataContentViewerOtherCases.java | 80 ++++++++++++------- 2 files changed, 66 insertions(+), 62 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form index 1f1e497aa7..9fdbfcfb67 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form @@ -73,7 +73,7 @@ - + @@ -96,10 +96,10 @@ - + - + @@ -117,31 +117,28 @@ - - - - - + - + - + + + - - + + + - - - + @@ -198,26 +195,13 @@ - + - - + + - - - - - - - - - - - - - - + diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java index 6082dce81b..fe5df99a0f 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2015-2018 Basis Technology Corp. + * Copyright 2015-2019 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -33,10 +33,12 @@ import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; import javax.swing.JFileChooser; @@ -306,6 +308,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi tableModel.clearTable(); correlationAttributes.clear(); earliestCaseDate.setText(Bundle.DataContentViewerOtherCases_earliestCaseNotAvailable()); + foundInLabel.setText(""); } @Override @@ -337,6 +340,34 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi public int isPreferred(Node node) { return 1; } + + /** + * Set the number of unique cases and data sources. + */ + @Messages({ + "DataContentViewerOtherCases.foundIn.text=Found in %d cases and %d data sources." + }) + private void setOccurrenceCounts() { + DataContentViewerOtherCasesTableModel model = (DataContentViewerOtherCasesTableModel) otherCasesTable.getModel(); + + // Note: Relying on the case name isn't a fool-proof way of determining + // a case to be unique. We should improve this in the future. 
+ int caseColumnIndex = DataContentViewerOtherCasesTableModel.TableColumns.CASE_NAME.ordinal(); + Set cases = new HashSet<>(); + for (int i=0; i < model.getRowCount(); i++) { + String caseName = (String) model.getValueAt(i, caseColumnIndex); + cases.add(caseName); + } + + int deviceColumnIndex = DataContentViewerOtherCasesTableModel.TableColumns.DEVICE.ordinal(); + Set devices = new HashSet<>(); + for (int i=0; i < model.getRowCount(); i++) { + String deviceId = (String) model.getValueAt(i, deviceColumnIndex); + devices.add(deviceId); + } + + foundInLabel.setText(String.format(Bundle.DataContentViewerOtherCases_foundIn_text(), cases.size(), devices.size())); + } /** * Get the associated BlackboardArtifact from a node, if it exists. @@ -720,7 +751,9 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi } else { setColumnWidths(); } + setEarliestCaseDate(); + setOccurrenceCounts(); } /** @@ -772,7 +805,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi otherCasesTable = new javax.swing.JTable(); earliestCaseLabel = new javax.swing.JLabel(); earliestCaseDate = new javax.swing.JLabel(); - tableStatusPanel = new javax.swing.JPanel(); + foundInLabel = new javax.swing.JLabel(); rightClickPopupMenu.addPopupMenuListener(new javax.swing.event.PopupMenuListener() { public void popupMenuCanceled(javax.swing.event.PopupMenuEvent evt) { @@ -818,44 +851,31 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi org.openide.awt.Mnemonics.setLocalizedText(earliestCaseDate, org.openide.util.NbBundle.getMessage(DataContentViewerOtherCases.class, "DataContentViewerOtherCases.earliestCaseDate.text")); // NOI18N - tableStatusPanel.setPreferredSize(new java.awt.Dimension(1500, 16)); - - javax.swing.GroupLayout tableStatusPanelLayout = new javax.swing.GroupLayout(tableStatusPanel); - tableStatusPanel.setLayout(tableStatusPanelLayout); - tableStatusPanelLayout.setHorizontalGroup( - 
tableStatusPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGap(0, 0, Short.MAX_VALUE) - ); - tableStatusPanelLayout.setVerticalGroup( - tableStatusPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGap(0, 16, Short.MAX_VALUE) - ); + org.openide.awt.Mnemonics.setLocalizedText(foundInLabel, org.openide.util.NbBundle.getMessage(DataContentViewerOtherCases.class, "DataContentViewerOtherCases.foundInLabel.text")); // NOI18N javax.swing.GroupLayout tableContainerPanelLayout = new javax.swing.GroupLayout(tableContainerPanel); tableContainerPanel.setLayout(tableContainerPanelLayout); tableContainerPanelLayout.setHorizontalGroup( tableContainerPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, tableContainerPanelLayout.createSequentialGroup() - .addComponent(tableStatusPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 1282, Short.MAX_VALUE) - .addGap(218, 218, 218)) - .addComponent(tableScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(tableScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 1508, Short.MAX_VALUE) .addGroup(tableContainerPanelLayout.createSequentialGroup() .addComponent(earliestCaseLabel) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(earliestCaseDate) - .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addGap(66, 66, 66) + .addComponent(foundInLabel) + .addGap(0, 1157, Short.MAX_VALUE)) ); tableContainerPanelLayout.setVerticalGroup( tableContainerPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, tableContainerPanelLayout.createSequentialGroup() - .addComponent(tableScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 27, Short.MAX_VALUE) - 
.addGap(2, 2, 2) + .addComponent(tableScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 71, Short.MAX_VALUE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(tableContainerPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(earliestCaseLabel) - .addComponent(earliestCaseDate)) - .addGap(0, 0, 0) - .addComponent(tableStatusPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(0, 0, 0)) + .addComponent(earliestCaseDate) + .addComponent(foundInLabel)) + .addGap(6, 6, 6)) ); javax.swing.GroupLayout otherCasesPanelLayout = new javax.swing.GroupLayout(otherCasesPanel); @@ -868,10 +888,10 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi ); otherCasesPanelLayout.setVerticalGroup( otherCasesPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGap(0, 483, Short.MAX_VALUE) + .addGap(0, 61, Short.MAX_VALUE) .addGroup(otherCasesPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(otherCasesPanelLayout.createSequentialGroup() - .addComponent(tableContainerPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 59, Short.MAX_VALUE) + .addComponent(tableContainerPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 61, Short.MAX_VALUE) .addGap(0, 0, 0))) ); @@ -883,7 +903,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(otherCasesPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 59, Short.MAX_VALUE) + .addComponent(otherCasesPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 61, Short.MAX_VALUE) ); }// //GEN-END:initComponents @@ -907,6 +927,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi private javax.swing.JLabel earliestCaseDate; private javax.swing.JLabel earliestCaseLabel; 
private javax.swing.JMenuItem exportToCSVMenuItem; + private javax.swing.JLabel foundInLabel; private javax.swing.JPanel otherCasesPanel; private javax.swing.JTable otherCasesTable; private javax.swing.JPopupMenu rightClickPopupMenu; @@ -915,7 +936,6 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi private javax.swing.JMenuItem showCommonalityMenuItem; private javax.swing.JPanel tableContainerPanel; private javax.swing.JScrollPane tableScrollPane; - private javax.swing.JPanel tableStatusPanel; // End of variables declaration//GEN-END:variables /** From 62610f7ccaee2b6d8e4add199be47b8038e3483d Mon Sep 17 00:00:00 2001 From: Raman Date: Thu, 28 Feb 2019 08:35:18 -0500 Subject: [PATCH 49/80] Fixed the Source of Web Cache artifacts. --- .../recentactivity/ChromeCacheExtractor.java | 45 ++++++++++++------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 976f83141f..56e18dfef5 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -344,17 +344,19 @@ final class ChromeCacheExtractor { List derivedFiles = new ArrayList<>(); - String fileName = cacheEntryAddress.getFilename(); + String cacheEntryFileName = cacheEntryAddress.getFilename(); String cachePath = cacheEntryAddress.getCachePath(); - Optional cacheFileCopy = this.getCacheFileCopy(fileName, cachePath); - if (!cacheFileCopy.isPresent()) { - logger.log(Level.SEVERE, String.format("Failed to get cache entry at address %s", cacheEntryAddress)); //NON-NLS + Optional cacheEntryFile = this.getCacheFileCopy(cacheEntryFileName, cachePath); + if (!cacheEntryFile.isPresent()) { + String msg = String.format("Failed to get cache entry at address %s", cacheEntryAddress); + throw new 
IngestModuleException(msg); } + // Get the cache entry and its data segments - CacheEntry cacheEntry = new CacheEntry(cacheEntryAddress, cacheFileCopy.get() ); + CacheEntry cacheEntry = new CacheEntry(cacheEntryAddress, cacheEntryFile.get() ); List dataEntries = cacheEntry.getData(); BlackboardAttribute urlAttr = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, @@ -400,10 +402,22 @@ final class ChromeCacheExtractor { sourceArtifacts.add(sourceArtifact); } - BlackboardArtifact webCacheArtifact = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); + BlackboardArtifact webCacheArtifact = cacheEntryFile.get().getAbstractFile().newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); if (webCacheArtifact != null) { webCacheArtifact.addAttributes(webCacheAttributes); - webCacheArtifacts.add(webCacheArtifact); + + // Add path of f_* file as attribute + webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, + moduleName, + dataFile.get().getUniquePath())); //NON-NLS + + long pathID = Util.findID(dataSource, dataFile.get().getUniquePath()); //NON-NLS + if (pathID != -1) { + webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, + moduleName, pathID)); + } + + webCacheArtifacts.add(webCacheArtifact); } if (isBrotliCompressed) { @@ -417,7 +431,6 @@ final class ChromeCacheExtractor { // Data segments in "data_x" files are saved in individual files and added as derived files String filename = data.save(); - String relPathname = getRelOutputFolderName() + data.getAddress().getCachePath() + filename; try { DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname, @@ -437,19 +450,19 @@ final class ChromeCacheExtractor { sourceArtifacts.add(sourceArtifact); } - BlackboardArtifact webCacheArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); + BlackboardArtifact webCacheArtifact = 
cacheEntryFile.get().getAbstractFile().newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE); if (webCacheArtifact != null) { webCacheArtifact.addAttributes(webCacheAttributes); + // Add path of derived file as attribute webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, moduleName, - dataFile.get().getUniquePath())); //NON-NLS - - long pathID = Util.findID(dataSource, dataFile.get().getUniquePath()); //NON-NLS - if (pathID != -1) { - webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, - moduleName, pathID)); - } + derivedFile.getUniquePath())); //NON-NLS + long pathID = Util.findID(dataSource, derivedFile.getUniquePath()); //NON-NLS + if (pathID != -1) { + webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, + moduleName, pathID)); + } webCacheArtifacts.add(webCacheArtifact); } From 0e614f84c4b18a921fd613d81c496b0240d1da1e Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Thu, 28 Feb 2019 10:09:51 -0500 Subject: [PATCH 50/80] Added folders to the find file function --- .../autopsy/recentactivity/ExtractSafari.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 4bbca9fd85..2fcfe827eb 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -66,6 +66,8 @@ final class ExtractSafari extends Extract { private static final String BOOKMARK_FILE_NAME = "Bookmarks.plist"; //NON-NLS private static final String DOWNLOAD_FILE_NAME = "Downloads.plist"; //NON-NLS private static final String COOKIE_FILE_NAME = "Cookies.binarycookies"; //NON-NLS + private static final String COOKIE_FOLDER = "Cookies"; + private static final String SAFARI_FOLDER = 
"Safari"; private static final String HEAD_URL = "url"; //NON-NLS private static final String HEAD_TITLE = "title"; //NON-NLS @@ -146,7 +148,7 @@ final class ExtractSafari extends Extract { private void processHistoryDB(Content dataSource, IngestJobContext context) throws TskCoreException, IOException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); - List historyFiles = fileManager.findFiles(dataSource, HISTORY_FILE_NAME); + List historyFiles = fileManager.findFiles(dataSource, HISTORY_FILE_NAME, SAFARI_FOLDER); if (historyFiles == null || historyFiles.isEmpty()) { return; @@ -177,7 +179,7 @@ final class ExtractSafari extends Extract { private void processBookmarkPList(Content dataSource, IngestJobContext context) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); - List files = fileManager.findFiles(dataSource, BOOKMARK_FILE_NAME); + List files = fileManager.findFiles(dataSource, BOOKMARK_FILE_NAME, SAFARI_FOLDER); if (files == null || files.isEmpty()) { return; @@ -209,7 +211,7 @@ final class ExtractSafari extends Extract { private void processDownloadsPList(Content dataSource, IngestJobContext context) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); - List files = fileManager.findFiles(dataSource, DOWNLOAD_FILE_NAME); + List files = fileManager.findFiles(dataSource, DOWNLOAD_FILE_NAME, SAFARI_FOLDER); if (files == null || files.isEmpty()) { return; @@ -236,7 +238,7 @@ final class ExtractSafari extends Extract { private void processBinaryCookieFile(Content dataSource, IngestJobContext context) throws TskCoreException, IOException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); - List files = 
fileManager.findFiles(dataSource, COOKIE_FILE_NAME); + List files = fileManager.findFiles(dataSource, COOKIE_FILE_NAME, COOKIE_FOLDER); if (files == null || files.isEmpty()) { return; From effeceee29206653188459b53a9d4719ea89e046 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Thu, 28 Feb 2019 10:29:40 -0500 Subject: [PATCH 51/80] 4626 allow connection pool used for upgrades to ignore foreign keys with sqlite --- .../datamodel/AbstractSqlEamDb.java | 129 +++++++++--------- .../datamodel/PostgresEamDb.java | 7 +- .../datamodel/SqliteEamDb.java | 39 +++--- 3 files changed, 93 insertions(+), 82 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index 0e4dfcc624..aa0c29a9cf 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -108,7 +108,7 @@ abstract class AbstractSqlEamDb implements EamDb { /** * Setup and create a connection to the selected database implementation */ - protected abstract Connection connect() throws EamDbException; + protected abstract Connection connect(boolean foreignKeys) throws EamDbException; /** * Add a new name/value pair in the db_info table. @@ -120,7 +120,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public void newDbInfo(String name, String value) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "INSERT INTO db_info (name, value) VALUES (?, ?) 
" @@ -141,7 +141,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public void addDataSourceObjectId(int rowId, long dataSourceObjectId) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "UPDATE data_sources SET datasource_obj_id=? WHERE id=?"; try { @@ -168,7 +168,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public String getDbInfo(String name) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -213,7 +213,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public void updateDbInfo(String name, String value) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "UPDATE db_info SET value=? WHERE name=?"; @@ -252,7 +252,7 @@ abstract class AbstractSqlEamDb implements EamDb { return cRCase; } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "INSERT INTO cases(case_uid, org_id, case_name, creation_date, case_number, " @@ -362,7 +362,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "UPDATE cases " @@ -447,7 +447,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @return The retrieved case */ private CorrelationCase getCaseByUUIDFromCr(String caseUUID) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); CorrelationCase eamCaseResult = null; PreparedStatement preparedStatement = null; @@ -508,7 +508,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @return The retrieved case */ private CorrelationCase 
getCaseByIdFromCr(int caseId) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); CorrelationCase eamCaseResult = null; PreparedStatement preparedStatement = null; @@ -548,7 +548,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getCases() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); List cases = new ArrayList<>(); CorrelationCase eamCaseResult; @@ -626,7 +626,7 @@ abstract class AbstractSqlEamDb implements EamDb { return eamDataSource; } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; //The conflict clause exists in case multiple nodes are trying to add the data source because it did not exist at the same time @@ -733,7 +733,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private CorrelationDataSource getDataSourceFromCr(int correlationCaseId, Long dataSourceObjectId) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); CorrelationDataSource eamDataSourceResult = null; PreparedStatement preparedStatement = null; @@ -797,7 +797,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @return The data source */ private CorrelationDataSource getDataSourceByIdFromCr(CorrelationCase correlationCase, int dataSourceId) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); CorrelationDataSource eamDataSourceResult = null; PreparedStatement preparedStatement = null; @@ -834,7 +834,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getDataSources() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); List dataSources = new ArrayList<>(); CorrelationDataSource eamDataSourceResult; @@ -904,7 +904,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation data source is null"); } - Connection 
conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "UPDATE data_sources " @@ -940,7 +940,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public void updateDataSourceName(CorrelationDataSource eamDataSource, String newName) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; @@ -983,7 +983,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void addArtifactInstance(CorrelationAttributeInstance eamArtifact) throws EamDbException { checkAddArtifactInstanceNulls(eamArtifact); - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; @@ -1068,7 +1068,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(); + Connection conn = connect(true); List artifactInstances = new ArrayList<>(); @@ -1133,7 +1133,7 @@ abstract class AbstractSqlEamDb implements EamDb { if (filePath == null) { throw new EamDbException("Correlation value is null"); } - Connection conn = connect(); + Connection conn = connect(true); List artifactInstances = new ArrayList<>(); @@ -1197,7 +1197,7 @@ abstract class AbstractSqlEamDb implements EamDb { public Long getCountArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(); + Connection conn = connect(true); Long instanceCount = 0L; PreparedStatement preparedStatement = null; @@ -1251,7 +1251,7 @@ abstract class AbstractSqlEamDb implements EamDb { public Long getCountUniqueCaseDataSourceTuplesHavingTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, 
CorrelationAttributeNormalizationException { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(); + Connection conn = connect(true); Long instanceCount = 0L; PreparedStatement preparedStatement = null; @@ -1284,7 +1284,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public Long getCountUniqueDataSources() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); Long instanceCount = 0L; PreparedStatement preparedStatement = null; @@ -1321,7 +1321,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public Long getCountArtifactInstancesByCaseDataSource(CorrelationDataSource correlationDataSource) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); Long instanceCount = 0L; List artifactTypes = getDefinedCorrelationTypes(); @@ -1394,7 +1394,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void commitAttributeInstancesBulk() throws EamDbException { List artifactTypes = getDefinedCorrelationTypes(); - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement bulkPs = null; try { @@ -1501,7 +1501,7 @@ abstract class AbstractSqlEamDb implements EamDb { return; } - Connection conn = connect(); + Connection conn = connect(true); int counter = 0; PreparedStatement bulkPs = null; @@ -1589,7 +1589,7 @@ abstract class AbstractSqlEamDb implements EamDb { if (eamArtifact.getCorrelationDataSource() == null) { throw new EamDbException("Correlation data source is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedQuery = null; String tableName = EamDbUtil.correlationTypeToInstanceTableName(eamArtifact.getCorrelationType()); String sqlUpdate @@ -1639,7 +1639,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } - Connection conn = connect(); + Connection conn = connect(true); 
PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -1707,7 +1707,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation file path is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -1776,7 +1776,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation data source is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedUpdate = null; PreparedStatement preparedQuery = null; @@ -1858,7 +1858,7 @@ abstract class AbstractSqlEamDb implements EamDb { public List getArtifactInstancesKnownBad(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(); + Connection conn = connect(true); List artifactInstances = new ArrayList<>(); @@ -1922,7 +1922,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation type is null"); } - Connection conn = connect(); + Connection conn = connect(true); List artifactInstances = new ArrayList<>(); @@ -1981,7 +1981,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(); + Connection conn = connect(true); Long badInstances = 0L; PreparedStatement preparedStatement = null; @@ -2028,7 +2028,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(); + Connection conn = connect(true); Collection caseNames = new LinkedHashSet<>(); @@ -2087,7 +2087,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private void deleteReferenceSetEntry(int 
referenceSetID) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "DELETE FROM reference_sets WHERE id=?"; @@ -2113,7 +2113,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private void deleteReferenceSetEntries(int referenceSetID) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "DELETE FROM %s WHERE reference_set_id=?"; @@ -2186,7 +2186,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizeValued = CorrelationAttributeNormalizer.normalize(this.getCorrelationTypeById(correlationTypeID), value); - Connection conn = connect(); + Connection conn = connect(true); Long matchingInstances = 0L; PreparedStatement preparedStatement = null; @@ -2232,7 +2232,7 @@ abstract class AbstractSqlEamDb implements EamDb { return false; } - Connection conn = connect(); + Connection conn = connect(true); Long badInstances = 0L; PreparedStatement preparedStatement = null; @@ -2275,7 +2275,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Callback interface is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; ResultSet resultSet = null; String tableName = EamDbUtil.correlationTypeToInstanceTableName(type); @@ -2319,7 +2319,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Where clause is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; ResultSet resultSet = null; String tableName = EamDbUtil.correlationTypeToInstanceTableName(type); @@ -2350,7 +2350,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("EamOrganization already has an ID"); } - Connection conn = connect(); + Connection conn = connect(true); ResultSet 
generatedKeys = null; PreparedStatement preparedStatement = null; String sql = "INSERT INTO organizations(org_name, poc_name, poc_email, poc_phone) VALUES (?, ?, ?, ?) " @@ -2389,7 +2389,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getOrganizations() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); List orgs = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -2424,7 +2424,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public EamOrganization getOrganizationByID(int orgID) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -2492,7 +2492,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void updateOrganization(EamOrganization updatedOrganization) throws EamDbException { testArgument(updatedOrganization); - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "UPDATE organizations SET org_name = ?, poc_name = ?, poc_email = ?, poc_phone = ? WHERE id = ?"; try { @@ -2515,7 +2515,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void deleteOrganization(EamOrganization organizationToDelete) throws EamDbException { testArgument(organizationToDelete); - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement checkIfUsedStatement = null; ResultSet resultSet = null; String checkIfUsedSql = "SELECT (select count(*) FROM cases WHERE org_id=?) 
+ (select count(*) FROM reference_sets WHERE org_id=?)"; @@ -2566,7 +2566,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Type on the EamGlobalSet is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement1 = null; PreparedStatement preparedStatement2 = null; @@ -2618,7 +2618,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public EamGlobalSet getReferenceSetByID(int referenceSetID) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement1 = null; ResultSet resultSet = null; @@ -2660,7 +2660,7 @@ abstract class AbstractSqlEamDb implements EamDb { } List results = new ArrayList<>(); - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement1 = null; ResultSet resultSet = null; @@ -2701,7 +2701,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation type is null"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; @@ -2737,7 +2737,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public boolean referenceSetExists(String referenceSetName, String version) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement1 = null; ResultSet resultSet = null; @@ -2774,7 +2774,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Null set of EamGlobalFileInstance"); } - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement bulkPs = null; try { @@ -2827,7 +2827,7 @@ abstract class AbstractSqlEamDb implements EamDb { public List getReferenceInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String aValue) throws EamDbException, CorrelationAttributeNormalizationException { String normalizeValued = 
CorrelationAttributeNormalizer.normalize(aType, aValue); - Connection conn = connect(); + Connection conn = connect(true); List globalFileInstances = new ArrayList<>(); PreparedStatement preparedStatement1 = null; @@ -2888,7 +2888,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ public int newCorrelationTypeNotKnownId(CorrelationAttributeInstance.Type newType) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; PreparedStatement preparedStatementQuery = null; @@ -2941,7 +2941,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private int newCorrelationTypeKnownId(CorrelationAttributeInstance.Type newType) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; PreparedStatement preparedStatementQuery = null; @@ -2987,7 +2987,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public List getDefinedCorrelationTypes() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); List aTypes = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -3022,7 +3022,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getEnabledCorrelationTypes() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); List aTypes = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -3057,7 +3057,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getSupportedCorrelationTypes() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); List aTypes = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -3090,7 +3090,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public void updateCorrelationType(CorrelationAttributeInstance.Type aType) throws 
EamDbException { - Connection conn = connect(); + Connection conn = connect(true); PreparedStatement preparedStatement = null; String sql = "UPDATE correlation_types SET display_name=?, db_table_name=?, supported=?, enabled=? WHERE id=?"; @@ -3144,7 +3144,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private CorrelationAttributeInstance.Type getCorrelationTypeByIdFromCr(int typeId) throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); CorrelationAttributeInstance.Type aType; PreparedStatement preparedStatement = null; @@ -3338,12 +3338,13 @@ abstract class AbstractSqlEamDb implements EamDb { Statement statement = null; PreparedStatement preparedStatement = null; Connection conn = null; + EamDbPlatformEnum selectedPlatform = null; try { - conn = connect(); + conn = connect(false); conn.setAutoCommit(false); statement = conn.createStatement(); - + selectedPlatform = EamDbPlatformEnum.getSelectedPlatform(); int minorVersion = 0; String minorVersionStr = null; resultSet = statement.executeQuery("SELECT value FROM db_info WHERE name='" + AbstractSqlEamDb.SCHEMA_MINOR_VERSION_KEY + "'"); @@ -3397,8 +3398,6 @@ abstract class AbstractSqlEamDb implements EamDb { return; } - EamDbPlatformEnum selectedPlatform = EamDbPlatformEnum.getSelectedPlatform(); - /* * Update to 1.1 */ @@ -3590,10 +3589,12 @@ abstract class AbstractSqlEamDb implements EamDb { case POSTGRESQL: statement.execute("ALTER TABLE data_sources DROP CONSTRAINT datasource_unique"); statement.execute("ALTER TABLE data_sources ADD CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id)"); - + break; case SQLITE: - statement.execute("ALTER TABLE data_sources RENAME TO old_data_sources"); + statement.execute("DROP INDEX IF EXISTS data_sources_name"); + statement.execute("DROP INDEX IF EXISTS data_sources_object_id"); + statement.execute("ALTER TABLE data_sources RENAME TO old_data_sources"); 
statement.execute(SqliteEamDbSettings.getCreateDataSourcesTableStatement()); statement.execute(SqliteEamDbSettings.getAddDataSourcesNameIndexStatement()); statement.execute(SqliteEamDbSettings.getAddDataSourcesObjectIdIndexStatement()); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java index 769b49bfd3..f4aff71927 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java @@ -104,7 +104,7 @@ final class PostgresEamDb extends AbstractSqlEamDb { @Override public void reset() throws EamDbException { - Connection conn = connect(); + Connection conn = connect(true); try { Statement dropContent = conn.createStatement(); @@ -164,12 +164,15 @@ final class PostgresEamDb extends AbstractSqlEamDb { /** * Lazily setup Singleton connection on first request. * + * @param foreignKeys -ignored arguement with postgres databases + * * @return A connection from the connection pool. 
* * @throws EamDbException */ @Override - protected Connection connect() throws EamDbException { + protected Connection connect(boolean foreignKeys) throws EamDbException { + //foreignKeys boolean is ignored for postgres synchronized (this) { if (!EamDb.isEnabled()) { throw new EamDbException("Central Repository module is not enabled"); // NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index a9d3b8b46d..020bd69b96 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -114,7 +114,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { try { acquireExclusiveLock(); - Connection conn = connect(); + Connection conn = connect(true); try { @@ -153,7 +153,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { * Setup a connection pool for db connections. * */ - private void setupConnectionPool() throws EamDbException { + private void setupConnectionPool(boolean foreignKeysEnabled) throws EamDbException { if (dbSettings.dbFileExists() == false) { throw new EamDbException("Central repository database missing"); @@ -169,25 +169,31 @@ final class SqliteEamDb extends AbstractSqlEamDb { connectionPool.setMaxIdle(-1); connectionPool.setMaxWaitMillis(1000); connectionPool.setValidationQuery(dbSettings.getValidationQuery()); - connectionPool.setConnectionInitSqls(Arrays.asList("PRAGMA foreign_keys = ON")); + if (foreignKeysEnabled) { + connectionPool.setConnectionInitSqls(Arrays.asList("PRAGMA foreign_keys = ON")); + } else { + connectionPool.setConnectionInitSqls(Arrays.asList("PRAGMA foreign_keys = OFF")); + } } /** * Lazily setup Singleton connection on first request. * + * @param foreignKeys determines if foreign keys should be enforced during this connection for SQLite + * * @return A connection from the connection pool. 
* * @throws EamDbException */ @Override - protected Connection connect() throws EamDbException { + protected Connection connect(boolean foreignKeys) throws EamDbException { synchronized (this) { if (!EamDb.isEnabled()) { throw new EamDbException("Central Repository module is not enabled"); // NON-NLS } if (connectionPool == null) { - setupConnectionPool(); + setupConnectionPool(foreignKeys); } try { @@ -275,7 +281,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { } @Override - public void addDataSourceObjectId(int rowId, long dataSourceObjectId) throws EamDbException{ + public void addDataSourceObjectId(int rowId, long dataSourceObjectId) throws EamDbException { try { acquireExclusiveLock(); super.addDataSourceObjectId(rowId, dataSourceObjectId); @@ -433,14 +439,14 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseSharedLock(); } } - + /** * Changes the name of a data source in the DB - * - * @param eamDataSource The data source - * @param newName The new name - * - * @throws EamDbException + * + * @param eamDataSource The data source + * @param newName The new name + * + * @throws EamDbException */ @Override public void updateDataSourceName(CorrelationDataSource eamDataSource, String newName) throws EamDbException { @@ -451,7 +457,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + /** * Updates the MD5 hash value in an existing data source in the database. * @@ -466,7 +472,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + /** * Updates the SHA-1 hash value in an existing data source in the database. * @@ -481,9 +487,10 @@ final class SqliteEamDb extends AbstractSqlEamDb { releaseExclusiveLock(); } } - + /** - * Updates the SHA-256 hash value in an existing data source in the database. + * Updates the SHA-256 hash value in an existing data source in the + * database. 
* * @param eamDataSource The data source to update */ From b7dce55e28bf2cfeaa20c8f2c1204bd3c7be3c36 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Thu, 28 Feb 2019 10:31:44 -0500 Subject: [PATCH 52/80] 4626 auto format changes --- .../autopsy/centralrepository/datamodel/AbstractSqlEamDb.java | 2 +- .../autopsy/centralrepository/datamodel/PostgresEamDb.java | 2 +- .../centralrepository/datamodel/PostgresEamDbSettings.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index aa0c29a9cf..f4fe77202f 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -3594,7 +3594,7 @@ abstract class AbstractSqlEamDb implements EamDb { case SQLITE: statement.execute("DROP INDEX IF EXISTS data_sources_name"); statement.execute("DROP INDEX IF EXISTS data_sources_object_id"); - statement.execute("ALTER TABLE data_sources RENAME TO old_data_sources"); + statement.execute("ALTER TABLE data_sources RENAME TO old_data_sources"); statement.execute(SqliteEamDbSettings.getCreateDataSourcesTableStatement()); statement.execute(SqliteEamDbSettings.getAddDataSourcesNameIndexStatement()); statement.execute(SqliteEamDbSettings.getAddDataSourcesObjectIdIndexStatement()); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java index f4aff71927..4a13e1e6b6 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java @@ -165,7 +165,7 @@ final class PostgresEamDb extends AbstractSqlEamDb { * Lazily setup Singleton connection on first request. 
* * @param foreignKeys -ignored arguement with postgres databases - * + * * @return A connection from the connection pool. * * @throws EamDbException diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java index 5e6f7e53ed..ff46697ddb 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java @@ -477,7 +477,7 @@ public final class PostgresEamDbSettings { + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," + "foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL)"); } - + /** * Get the statement String for creating a new data_sources table in a * Postgres central repository. From 7d556addabb9813d8a07ec16cf698c19b7341078 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Thu, 28 Feb 2019 11:41:29 -0500 Subject: [PATCH 53/80] Changed absolute path in project.properties to relative --- RecentActivity/nbproject/project.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RecentActivity/nbproject/project.properties b/RecentActivity/nbproject/project.properties index b5f9e4cc71..4071f4a54e 100644 --- a/RecentActivity/nbproject/project.properties +++ b/RecentActivity/nbproject/project.properties @@ -1,4 +1,4 @@ -file.reference.dd-plist-1.20.jar=C:\\Users\\kelly\\Workspace\\autopsy\\RecentActivity\\release\\modules\\ext\\dd-plist-1.20.jar +file.reference.dd-plist-1.20.jar=release/modules/ext/dd-plist-1.20.jar javac.source=1.8 javac.compilerargs=-Xlint -Xlint:-serial license.file=../LICENSE-2.0.txt From 5122dfba4d83843b9d9ecafba2f6e27428e35605 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Thu, 28 Feb 2019 12:54:00 -0500 Subject: [PATCH 54/80] Changed absolute path to relative in project.xml --- 
RecentActivity/nbproject/project.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RecentActivity/nbproject/project.xml b/RecentActivity/nbproject/project.xml index f397b6b23b..9584170602 100644 --- a/RecentActivity/nbproject/project.xml +++ b/RecentActivity/nbproject/project.xml @@ -76,7 +76,7 @@ ext/dd-plist-1.20.jar - C:\Users\kelly\Workspace\autopsy\RecentActivity\release\modules\ext\dd-plist-1.20.jar + release/modules/ext/dd-plist-1.20.jar From 91b2656eb85187ca4420a571c12450ff566bd27c Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Thu, 28 Feb 2019 13:03:31 -0500 Subject: [PATCH 55/80] Changed absolute path to relative in coreLib. --- CoreLibs/nbproject/project.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CoreLibs/nbproject/project.xml b/CoreLibs/nbproject/project.xml index 33c3e28eeb..2dd56aa5cb 100644 --- a/CoreLibs/nbproject/project.xml +++ b/CoreLibs/nbproject/project.xml @@ -973,7 +973,7 @@ ext/dd-plist-1.20.jar - C:\Users\kelly\Workspace\autopsy\CoreLibs\release\modules\ext\dd-plist-1.20.jar + release/modules/ext/dd-plist-1.20.jar ext/dom4j-1.6.1.jar From 7c0c8de4886857fbfbf7403aef7478489f7fe35f Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Thu, 28 Feb 2019 13:44:09 -0500 Subject: [PATCH 56/80] 4626-change unique constraint on new central repository tables to be as minimal as possible --- .../centralrepository/datamodel/AbstractSqlEamDb.java | 8 +++++++- .../datamodel/PostgresEamDbSettings.java | 2 +- .../centralrepository/datamodel/SqliteEamDbSettings.java | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index f4fe77202f..fe09da90ac 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ 
-3588,6 +3588,7 @@ abstract class AbstractSqlEamDb implements EamDb { switch (selectedPlatform) { case POSTGRESQL: statement.execute("ALTER TABLE data_sources DROP CONSTRAINT datasource_unique"); + //unique constraint for upgraded data_sources table is purposefully different than new data_sources table statement.execute("ALTER TABLE data_sources ADD CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id)"); break; @@ -3595,7 +3596,12 @@ abstract class AbstractSqlEamDb implements EamDb { statement.execute("DROP INDEX IF EXISTS data_sources_name"); statement.execute("DROP INDEX IF EXISTS data_sources_object_id"); statement.execute("ALTER TABLE data_sources RENAME TO old_data_sources"); - statement.execute(SqliteEamDbSettings.getCreateDataSourcesTableStatement()); + //unique constraint for upgraded data_sources table is purposefully different than new data_sources table + statement.execute("CREATE TABLE IF NOT EXISTS data_sources (id integer primary key autoincrement NOT NULL," + + "case_id integer NOT NULL,device_id text NOT NULL,name text NOT NULL,datasource_obj_id integer," + + "md5 text DEFAULT NULL,sha1 text DEFAULT NULL,sha256 text DEFAULT NULL," + + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," + + "CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id))"); statement.execute(SqliteEamDbSettings.getAddDataSourcesNameIndexStatement()); statement.execute(SqliteEamDbSettings.getAddDataSourcesObjectIdIndexStatement()); statement.execute("INSERT INTO data_sources SELECT * FROM old_data_sources"); diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java index ff46697ddb..a4ab17906b 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java +++ 
b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java @@ -491,7 +491,7 @@ public final class PostgresEamDbSettings { + "name text NOT NULL,datasource_obj_id BIGINT,md5 text DEFAULT NULL," + "sha1 text DEFAULT NULL,sha256 text DEFAULT NULL," + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," - + "CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id))"; + + "CONSTRAINT datasource_unique UNIQUE (case_id, datasource_obj_id))"; } /** diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java index ae8e419e99..c76ab19783 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java @@ -438,7 +438,7 @@ public final class SqliteEamDbSettings { + "case_id integer NOT NULL,device_id text NOT NULL,name text NOT NULL,datasource_obj_id integer," + "md5 text DEFAULT NULL,sha1 text DEFAULT NULL,sha256 text DEFAULT NULL," + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL," - + "CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id))"; + + "CONSTRAINT datasource_unique UNIQUE (case_id, datasource_obj_id))"; } /** From bf53781efd2a5ecc6c202fd15e2ebfc1632ad5d2 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Thu, 28 Feb 2019 15:20:35 -0500 Subject: [PATCH 57/80] Changes from review comments, plus a few other little cosmetic things. 
--- .../recentactivity/BinaryCookieReader.java | 60 +++++++++---------- .../autopsy/recentactivity/Extract.java | 43 ++++++------- .../autopsy/recentactivity/ExtractSafari.java | 26 ++++---- 3 files changed, 66 insertions(+), 63 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java index 04e0893e1b..f08e4e4360 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java @@ -32,7 +32,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.recentactivity.BinaryCookieReader.Cookie; /** - * SafariCookieReader wraps all the knowledge of how to read the mac + * The binary cookie reader encapsulates all the knowledge of how to read the mac * .binarycookie files into one class. * * The binarycookie file has a header which describes how many pages of cookies @@ -53,16 +53,16 @@ public final class BinaryCookieReader implements Iterable { private final int[] pageSizeArray; private final File cookieFile; - private final Logger logger = Logger.getLogger(this.getClass().getName()); + private final Logger LOG = Logger.getLogger(BinaryCookieReader.class.getName()); /** - * SafariCookieReader wraps all the knowledge of how to read the mac + * The binary cookie reader encapsulates all the knowledge of how to read the mac * .binarycookie files into one class. 
* */ - private BinaryCookieReader(File file, int[] sizeArray) { - cookieFile = file; - pageSizeArray = sizeArray; + private BinaryCookieReader(File cookieFile, int[] pageSizeArray) { + this.cookieFile = cookieFile; + this.pageSizeArray = pageSizeArray; } /** @@ -75,17 +75,17 @@ public final class BinaryCookieReader implements Iterable { * @throws FileNotFoundException * @throws IOException */ - public static BinaryCookieReader initalizeReader(File file) throws FileNotFoundException, IOException { + public static BinaryCookieReader initalizeReader(File cookieFile) throws FileNotFoundException, IOException { BinaryCookieReader reader = null; - try (DataInputStream dataStream = new DataInputStream(new FileInputStream(file))) { + try (DataInputStream dataStream = new DataInputStream(new FileInputStream(cookieFile))) { byte[] magic = new byte[MAGIC_SIZE]; if (dataStream.read(magic) != MAGIC_SIZE) { - throw new IOException("Failed to read header, invalid file size" + file.getName()); //NON-NLS + throw new IOException("Failed to read header, invalid file size (" + cookieFile.getName() + ")"); //NON-NLS } if (!(new String(magic)).equals(COOKIE_MAGIC)) { - throw new IOException(file.getName() + " is not a cookie file"); //NON-NLS + throw new IOException(cookieFile.getName() + " is not a cookie file"); //NON-NLS } int[] sizeArray = null; @@ -98,14 +98,14 @@ public final class BinaryCookieReader implements Iterable { } } - reader = new BinaryCookieReader(file, sizeArray); + reader = new BinaryCookieReader(cookieFile, sizeArray); } return reader; } /** - * Creates and returns a instance of CookiePageIterator + * Creates and returns a instance of CookiePageIterator. 
* * @return CookiePageIterator */ @@ -133,7 +133,7 @@ public final class BinaryCookieReader implements Iterable { // skip to the first page dataStream.skipBytes((2 * SIZEOF_INT_BYTES) + (pageSizeArray.length * SIZEOF_INT_BYTES)); } catch (IOException ex) { - logger.log(Level.WARNING, "Error occured creating DataInputStream", new Object[]{cookieFile.getName(), ex}); //NON-NLS + LOG.log(Level.WARNING, "Error occured creating DataInputStream", new Object[]{cookieFile.getName(), ex}); //NON-NLS closeStream(); // Just incase the error was from skip } } @@ -142,7 +142,7 @@ public final class BinaryCookieReader implements Iterable { * hasNext manages reading of each cookie page and creating the Cookie * Page objects. If the currentIterator returns false from hadNext, read * the next page and create and new instance of - * CookiePage\CookieIterator + * CookiePage\CookieIterator. * * @return True if there are more cookies */ @@ -170,7 +170,7 @@ public final class BinaryCookieReader implements Iterable { pageIndex++; } catch (IOException ex) { closeStream(); - logger.log(Level.WARNING, "A read error occured for file {0} page {1} {2}", new Object[]{cookieFile.getName(), pageIndex, ex}); //NON-NLS + LOG.log(Level.WARNING, "A read error occured for file {0} page {1} {2}", new Object[]{cookieFile.getName(), pageIndex, ex}); //NON-NLS return false; } } @@ -179,7 +179,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Get the next cookie from current the current CookieIterator + * Get the next cookie from current the current CookieIterator. 
* * @return The next cookie */ @@ -203,14 +203,14 @@ public final class BinaryCookieReader implements Iterable { dataStream.close(); dataStream = null; } catch (IOException ex) { - logger.log(Level.WARNING, "SafariCookieReader unable to close DataInputStream for file {0} {1}", new Object[]{cookieFile.getName(), ex}); //NON-NLS + LOG.log(Level.WARNING, "SafariCookieReader unable to close DataInputStream for file {0} {1}", new Object[]{cookieFile.getName(), ex}); //NON-NLS } } } } /** - * Wrapper class for an instance of a CookiePage in the binarycookie file + * Wrapper class for an instance of a CookiePage in the binarycookie file. */ private class CookiePage implements Iterable { @@ -219,7 +219,7 @@ public final class BinaryCookieReader implements Iterable { /** * Setup the CookiePage object. Calidates that the page bytes are in the - * correct format by checking for the header value of 0x0100 + * correct format by checking for the header value of 0x0100. * * @param page * @throws IOException @@ -258,7 +258,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Implements Iterator to iterate over the cookies in the page + * Implements Iterator to iterate over the cookies in the page. */ private class CookieIterator implements Iterator { @@ -301,7 +301,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Represents an instance of a cookie from the binarycookie file + * Represents an instance of a cookie from the binarycookie file. */ public class Cookie { @@ -316,7 +316,7 @@ public final class BinaryCookieReader implements Iterable { private final String value; /** - * Creates a cookie object from the given array of bytes + * Creates a cookie object from the given array of bytes. * * @param cookieBytes Byte array for the cookie */ @@ -348,7 +348,7 @@ public final class BinaryCookieReader implements Iterable { /** * Returns the expiration date of the cookie represented by this cookie - * object + * object. 
* * @return */ @@ -358,7 +358,7 @@ public final class BinaryCookieReader implements Iterable { /** * Returns the creation date of the cookie represented by this cookie - * object + * object. * * @return */ @@ -367,7 +367,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Returns the url of the cookie represented by this cookie object + * Returns the url of the cookie represented by this cookie object. * * @return the cookie URL */ @@ -376,7 +376,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Returns the name of the cookie represented by this cookie object + * Returns the name of the cookie represented by this cookie object. * * @return The cookie name */ @@ -385,7 +385,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Returns the path of the cookie represented by this cookie object + * Returns the path of the cookie represented by this cookie object. * * @return The cookie path */ @@ -394,7 +394,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Returns the value of the cookie represented by this cookie object + * Returns the value of the cookie represented by this cookie object. * * @return The cookie value */ @@ -406,7 +406,7 @@ public final class BinaryCookieReader implements Iterable { * Give an array a bytes and an offset in the array this function will * copy the bytes from offset to the first null terminator into a new * array. The bytes in the new array will be returned as a string not - * including the null terminator + * including the null terminator. 
* * @param byteArray * @param offset @@ -416,7 +416,7 @@ public final class BinaryCookieReader implements Iterable { byte[] stringBytes = new byte[byteArray.length - offset]; for (int index = 0; index < stringBytes.length; index++) { byte nibble = byteArray[offset + index]; - if (nibble != '\0') { + if (nibble != '\0') { //NON-NLS stringBytes[index] = nibble; } else { break; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index 48a262f89c..0280bc8455 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -248,7 +248,7 @@ abstract class Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); + (url != null) ? url : "")); //NON-NLS if (accessTime != null) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, @@ -257,23 +257,23 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, RecentActivityExtracterModuleFactory.getModuleName(), - (referrer != null) ? referrer : "")); + (referrer != null) ? referrer : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, RecentActivityExtracterModuleFactory.getModuleName(), - (title != null) ? title : "")); + (title != null) ? title : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); + (programName != null) ? 
programName : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? domain : "")); + (domain != null) ? domain : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, RecentActivityExtracterModuleFactory.getModuleName(), - (user != null) ? user : "")); + (user != null) ? user : "")); //NON-NLS return bbattributes; } @@ -295,7 +295,7 @@ abstract class Extract { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); + (url != null) ? url : "")); //NON-NLS if (creationTime != null) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, @@ -304,19 +304,19 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, RecentActivityExtracterModuleFactory.getModuleName(), - (name != null) ? name : "")); + (name != null) ? name : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, RecentActivityExtracterModuleFactory.getModuleName(), - (value != null) ? value : "")); + (value != null) ? value : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); + (programName != null) ? programName : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? domain : "")); + (domain != null) ? 
domain : "")); //NON-NLS return bbattributes; } @@ -336,11 +336,11 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); + (url != null) ? url : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, RecentActivityExtracterModuleFactory.getModuleName(), - (title != null) ? title : "")); + (title != null) ? title : "")); //NON-NLS if (creationTime != null) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, @@ -349,11 +349,11 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); + (programName != null) ? programName : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? domain : "")); + (domain != null) ? domain : "")); //NON-NLS return bbattributes; } @@ -373,7 +373,7 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, RecentActivityExtracterModuleFactory.getModuleName(), - (path != null) ? path : "")); + (path != null) ? path : "")); //NON-NLS if (pathID != null && pathID != -1) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, @@ -383,7 +383,7 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); + (url != null) ? 
url : "")); //NON-NLS if (accessTime != null) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, @@ -392,21 +392,22 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, RecentActivityExtracterModuleFactory.getModuleName(), - (domain != null) ? domain : "")); + (domain != null) ? domain : "")); //NON-NLS bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), - (programName != null) ? programName : "")); + (programName != null) ? programName : "")); //NON-NLS return bbattributes; } /** - * Create temporary file for the given AbstractFile + * Create temporary file for the given AbstractFile. The new file will be + * created in the temp directory for the module with a unique file name. * * @param context * @param file - * @return + * @return Newly created copy of the AbstractFile * @throws IOException */ protected java.io.File createTemporaryFile(IngestJobContext context, AbstractFile file) throws IOException{ diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 2fcfe827eb..afb52654b2 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -82,17 +82,17 @@ final class ExtractSafari extends Extract { private static final String PLIST_KEY_DOWNLOAD_PATH = "DownloadEntryPath"; //NON-NLS private static final String PLIST_KEY_DOWNLOAD_HISTORY = "DownloadHistory"; //NON-NLS - private final Logger logger = Logger.getLogger(this.getClass().getName()); + private static final Logger LOG = Logger.getLogger(ExtractSafari.class.getName()); @Messages({ "ExtractSafari_Module_Name=Safari", "ExtractSafari_Error_Getting_History=An error occurred while 
processing Safari history files.", "ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files", - "ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Bookmark files", + "ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files", }) /** - * Extract the bookmarks, cookies, downloads and history from Safari + * Extract the bookmarks, cookies, downloads and history from Safari. * */ ExtractSafari() { @@ -113,34 +113,34 @@ final class ExtractSafari extends Extract { } catch (IOException | TskCoreException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Getting_History()); - logger.log(Level.SEVERE, "Exception thrown while processing history file: {0}", ex); //NON-NLS + LOG.log(Level.SEVERE, "Exception thrown while processing history file: {0}", ex); //NON-NLS } try { processBookmarkPList(dataSource, context); } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | ParserConfigurationException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); - logger.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS + LOG.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS } try { processDownloadsPList(dataSource, context); } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | ParserConfigurationException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); - logger.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS + LOG.log(Level.SEVERE, "Exception thrown while parsing Safari Download.plist file: {0}", ex); //NON-NLS } try { processBinaryCookieFile(dataSource, context); } catch (IOException | TskCoreException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Cookies()); - logger.log(Level.SEVERE, 
"Exception thrown while processing Safarri cookies file: {0}", ex); //NON-NLS + LOG.log(Level.SEVERE, "Exception thrown while processing Safari cookies file: {0}", ex); //NON-NLS } } /** * Finds the all of the history.db files in the case looping through them to - * find all of the history artifacts + * find all of the history artifacts. * * @throws TskCoreException * @throws IOException @@ -166,7 +166,7 @@ final class ExtractSafari extends Extract { } /** - * Finds all Bookmark.plist files and looks for bookmark entries + * Finds all Bookmark.plist files and looks for bookmark entries. * @param dataSource * @param context * @throws TskCoreException @@ -348,14 +348,16 @@ final class ExtractSafari extends Extract { BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, bbartifacts)); } } finally { - tempFile.delete(); + if (tempFile != null) { + tempFile.delete(); + } } } /** * Creates a temporary copy of the Cookie file and creates a list of cookie - * BlackboardArtifacts + * BlackboardArtifacts. * * @param context IngetstJobContext * @param file Original Cookie file from the case @@ -551,7 +553,7 @@ final class ExtractSafari extends Extract { /** * Parses the plist object to find the bookmark child objects, then creates - * an artifact with the bookmark information + * an artifact with the bookmark information. * * @param bbartifacts BlackboardArtifact list to add new the artifacts to * @param origFile The origFile Bookmark.plist file from the case From 9575bba204736273e90ce3e8b78cf69f07c377f2 Mon Sep 17 00:00:00 2001 From: Raman Date: Thu, 28 Feb 2019 17:36:09 -0500 Subject: [PATCH 58/80] Merge from develop. 
--- .../experimental/autoingest/Bundle.properties-MERGED | 2 +- .../autopsy/keywordsearch/Bundle.properties-MERGED | 6 +++--- .../autopsy/recentactivity/Bundle.properties-MERGED | 1 + .../autopsy/thunderbirdparser/Bundle.properties-MERGED | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED index 0026d0c670..c10ac5581f 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED @@ -183,7 +183,7 @@ OpenAutoIngestLogAction.menuItemText=Open Auto Ingest Log File # {0} - caseErrorMessage OpenCaseAction.errorMsg=Failed to open case: {0} OpenCaseAction.menuItemText=Open -OpenIDE-Module-Long-Description=This module contains features that are being developed by Basis Technology and are not part of the default Autopsy distribution. You can enable this module to use the new features. The features should be stable, but their exact behavior and API are subject to change. \n\nWe make no guarantee that the API of this module will not change, so developers should be careful when relying on it. +OpenIDE-Module-Long-Description=This module contains features that are being developed by Basis Technology and are not part of the default Autopsy distribution. You can enable this module to use the new features. The features should be stable, but their exact behavior and API are subject to change.\n\nWe make no guarantee that the API of this module will not change, so developers should be careful when relying on it. OpenIDE-Module-Name=Experimental OpenIDE-Module-Short-Description=This module contains features that are being developed by Basis Technology and are not part of the default Autopsy distribution. 
DisplayLogDialog.cannotOpenLog=Unable to open the selected case log file diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index 52513be1e0..f4febc1d7c 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -34,7 +34,7 @@ KeywordSearchIngestModule.startupMessage.failedToGetIndexSchema=Failed to get sc KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found. KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time. \nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\n\The module indexes files found in the disk image at ingest time.\n\It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search @@ -128,7 +128,7 @@ KeywordSearchFilterNode.getFileActions.viewInNewWinActionLbl=View in New Window KeywordSearchIngestModule.init.noKwInLstMsg=No keywords in keyword list. KeywordSearchIngestModule.init.onlyIdxKwSkipMsg=Only indexing will be done and keyword search will be skipped (you can still add keyword lists using the Keyword Lists - Add to Ingest). KeywordSearchIngestModule.doInBackGround.displayName=Periodic Keyword Search -KeywordSearchIngestModule.doInBackGround.finalizeMsg=- Finalizing +KeywordSearchIngestModule.doInBackGround.finalizeMsg=Finalizing KeywordSearchIngestModule.doInBackGround.pendingMsg=(Pending) RawText.FileText=File Text RawText.ResultText=Result Text @@ -224,7 +224,7 @@ Server.start.exception.cantStartSolr.msg=Could not start Solr server process Server.start.exception.cantStartSolr.msg2=Could not start Solr server process Server.isRunning.exception.errCheckSolrRunning.msg=Error checking if Solr server is running Server.isRunning.exception.errCheckSolrRunning.msg2=Error checking if Solr server is running -Server.openCore.exception.alreadyOpen.msg=Already an open Core! Explicitely close Core first. +Server.openCore.exception.alreadyOpen.msg=There is an already open Solr core. Explicitly close the core first. 
Server.queryNumIdxFiles.exception.msg=Error querying number of indexed files, Server.queryNumIdxChunks.exception.msg=Error querying number of indexed chunks, Server.queryNumIdxDocs.exception.msg=Error querying number of indexed documents, diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 62bacbc9fc..b2a5a02487 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -1,6 +1,7 @@ cannotBuildXmlParser=Unable to build XML parser: cannotLoadSEUQA=Unable to load Search Engine URL Query Analyzer settings file, SEUQAMappings.xml: cannotParseXml=Unable to parse XML file: +ChromeCacheExtractor.moduleName=ChromeCacheExtractor # {0} - OS name DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0}) DataSourceUsageAnalyzer.parentModuleName=Recent Activity diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties-MERGED b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties-MERGED index 7072b374a7..b39a22c484 100755 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties-MERGED +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/Bundle.properties-MERGED @@ -1,6 +1,6 @@ MboxParser.handleAttch.noOpenCase.errMsg=Exception while getting open case. OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Email Parser ingest module.\n\nThe module extracts MBOX and PST e-mail files and posts the results to the blackboard. \nIt knows about the Thunderbird folder structure for MBOX files. +OpenIDE-Module-Long-Description=Email Parser ingest module.\n\nThe module extracts MBOX and PST e-mail files and posts the results to the blackboard.\nIt knows about the Thunderbird folder structure for MBOX files. 
OpenIDE-Module-Name=Email Parser OpenIDE-Module-Short-Description=Parses MBOX and PST files MboxParser.parse.errMsg.failedToReadFile=Failed to read mbox file from disk. From 9594c031df90536ec7851a89f6122e106b91b5b8 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Thu, 28 Feb 2019 17:56:23 -0500 Subject: [PATCH 59/80] 4782 add incrementing integer to beggining of extracted picture names for doc --- .../MSOfficeEmbeddedContentExtractor.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java index 16c2b0e3e8..26f4c3d961 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java @@ -302,8 +302,9 @@ class MSOfficeEmbeddedContentExtractor { } List listOfExtractedImages = new ArrayList<>(); byte[] data = null; + int pictureNumber = 0; for (Picture picture : listOfAllPictures) { - String fileName = picture.suggestFullFileName(); + String fileName = String.valueOf(pictureNumber) + picture.suggestFullFileName(); try { data = picture.getContent(); } catch (Exception ex) { @@ -312,6 +313,7 @@ class MSOfficeEmbeddedContentExtractor { writeExtractedImage(Paths.get(outputFolderPath, fileName).toString(), data); // TODO Extract more info from the Picture viz ctime, crtime, atime, mtime listOfExtractedImages.add(new ExtractedFile(fileName, getFileRelativePath(fileName), picture.getSize())); + pictureNumber++; } return listOfExtractedImages; From a22a84566dd033565e74c881f14b73ab3579823e Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Thu, 28 Feb 2019 18:05:09 -0500 Subject: [PATCH 60/80] 4782 add non hex character to name to account for variable lenght names --- .../MSOfficeEmbeddedContentExtractor.java 
| 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java index 26f4c3d961..02a03fcb81 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java @@ -302,9 +302,9 @@ class MSOfficeEmbeddedContentExtractor { } List listOfExtractedImages = new ArrayList<>(); byte[] data = null; - int pictureNumber = 0; + int pictureNumber = 0; //added to ensure uniqueness in cases where suggestFullFileName returns duplicates for (Picture picture : listOfAllPictures) { - String fileName = String.valueOf(pictureNumber) + picture.suggestFullFileName(); + String fileName = String.valueOf(pictureNumber) +"-"+ picture.suggestFullFileName(); try { data = picture.getContent(); } catch (Exception ex) { From 94225be5a76988437ee29b610359cfbcdd6ee97d Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Thu, 28 Feb 2019 18:10:22 -0500 Subject: [PATCH 61/80] 4782 make file name determination for .doc files simaler to other files --- .../embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java index 02a03fcb81..fc84ecfa59 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java @@ -304,7 +304,7 @@ class MSOfficeEmbeddedContentExtractor { byte[] data = null; int pictureNumber = 0; //added to ensure uniqueness in 
cases where suggestFullFileName returns duplicates for (Picture picture : listOfAllPictures) { - String fileName = String.valueOf(pictureNumber) +"-"+ picture.suggestFullFileName(); + String fileName = UNKNOWN_IMAGE_NAME_PREFIX +pictureNumber + picture.suggestFileExtension(); try { data = picture.getContent(); } catch (Exception ex) { From 2a19dc71aa82c0fc74d47521a2dbd3c012e09069 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Thu, 28 Feb 2019 18:16:04 -0500 Subject: [PATCH 62/80] 4782 add missing dot before extension to new file names for embedded images --- .../embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java index fc84ecfa59..d9c142563b 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/MSOfficeEmbeddedContentExtractor.java @@ -304,7 +304,7 @@ class MSOfficeEmbeddedContentExtractor { byte[] data = null; int pictureNumber = 0; //added to ensure uniqueness in cases where suggestFullFileName returns duplicates for (Picture picture : listOfAllPictures) { - String fileName = UNKNOWN_IMAGE_NAME_PREFIX +pictureNumber + picture.suggestFileExtension(); + String fileName = UNKNOWN_IMAGE_NAME_PREFIX +pictureNumber +"."+ picture.suggestFileExtension(); try { data = picture.getContent(); } catch (Exception ex) { From 9f31d3d4e02021ea8937941267a9d36a44071982 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Thu, 28 Feb 2019 23:56:58 -0500 Subject: [PATCH 63/80] Reset viewport. 
--- .../autopsy/contentviewers/MediaViewImagePanel.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java index e501b136d5..b98c350d68 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/MediaViewImagePanel.java @@ -126,7 +126,6 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan scene.getStylesheets().add(MediaViewImagePanel.class.getResource("MediaViewImagePanel.css").toExternalForm()); //NOI18N fxPanel.setScene(scene); - //bind size of image to that of scene, while keeping proportions fxImageView.setSmooth(true); fxImageView.setCache(true); @@ -142,11 +141,13 @@ class MediaViewImagePanel extends JPanel implements MediaFileViewer.MediaViewPan } /** - * clear the displayed image + * Clear the displayed image */ public void reset() { Platform.runLater(() -> { + fxImageView.setViewport(new Rectangle2D(0, 0, 0, 0)); fxImageView.setImage(null); + scrollPane.setContent(null); scrollPane.setContent(fxImageView); }); From 3e153f0c33a78d978fc8b5a6b5aea3a22670055f Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Fri, 1 Mar 2019 01:13:53 -0500 Subject: [PATCH 64/80] Additional mime-type support; reposition button. 
--- .../sleuthkit/autopsy/contentviewers/HtmlPanel.form | 6 +++--- .../sleuthkit/autopsy/contentviewers/HtmlPanel.java | 11 +++++++---- .../sleuthkit/autopsy/contentviewers/HtmlViewer.java | 5 ++++- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form index ec9f0fc569..54e6d45006 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.form @@ -16,11 +16,11 @@ - - + + + - diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java index 96badc44c3..76dd7b1f3a 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.contentviewers; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.coreutils.Logger; /** * A file content viewer for HTML files. 
@@ -29,6 +30,7 @@ import org.openide.util.NbBundle.Messages; final class HtmlPanel extends javax.swing.JPanel { private static final long serialVersionUID = 1L; + private static final Logger logger = Logger.getLogger(HtmlPanel.class.getName()); private String htmlText; @@ -103,6 +105,7 @@ final class HtmlPanel extends javax.swing.JPanel { showImagesToggleButton.setText(Bundle.HtmlPanel_showImagesToggleButton_show()); this.htmlbodyTextPane.setText(wrapInHtmlBody(cleanseHTML(htmlText))); } + htmlbodyTextPane.setCaretPosition(0); showImagesToggleButton.setEnabled(true); } @@ -137,10 +140,10 @@ final class HtmlPanel extends javax.swing.JPanel { this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() - .addGap(0, 203, Short.MAX_VALUE) - .addComponent(showImagesToggleButton)) - .addComponent(htmlScrollPane) + .addComponent(htmlScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 300, Short.MAX_VALUE) + .addGroup(layout.createSequentialGroup() + .addComponent(showImagesToggleButton) + .addGap(0, 0, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java index 657c2aad38..ff22d952e7 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlViewer.java @@ -37,7 +37,10 @@ final class HtmlViewer extends javax.swing.JPanel implements FileTypeViewer { private static final long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(HtmlViewer.class.getName()); - private static final String[] SUPPORTED_MIMETYPES = new String[]{"text/html"}; + private static final String[] SUPPORTED_MIMETYPES = new String[]{ + "text/html", + 
"application/xhtml+xml" + }; /** * Creates new form HtmlViewerPanel From 7262cbbfe3eb56bb860f793b8be1ffc82d86fff2 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Fri, 1 Mar 2019 01:17:37 -0500 Subject: [PATCH 65/80] Removed unused logger. --- Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java index 76dd7b1f3a..1699536c94 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/HtmlPanel.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.contentviewers; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.coreutils.Logger; /** * A file content viewer for HTML files. @@ -30,7 +29,6 @@ import org.sleuthkit.autopsy.coreutils.Logger; final class HtmlPanel extends javax.swing.JPanel { private static final long serialVersionUID = 1L; - private static final Logger logger = Logger.getLogger(HtmlPanel.class.getName()); private String htmlText; From ab782a4df7ac5e1af37fee08a7d01d04f908c216 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Fri, 1 Mar 2019 11:35:43 -0500 Subject: [PATCH 66/80] Completed the rest of the review comments that where hidden --- .../recentactivity/BinaryCookieReader.java | 65 ++++++++++--------- .../autopsy/recentactivity/Extract.java | 18 ++++- 2 files changed, 50 insertions(+), 33 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java index f08e4e4360..3e24a1fe5d 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/BinaryCookieReader.java @@ -53,7 +53,7 @@ public final class 
BinaryCookieReader implements Iterable { private final int[] pageSizeArray; private final File cookieFile; - private final Logger LOG = Logger.getLogger(BinaryCookieReader.class.getName()); + private static final Logger LOG = Logger.getLogger(BinaryCookieReader.class.getName()); /** * The binary cookie reader encapsulates all the knowledge of how to read the mac @@ -96,6 +96,8 @@ public final class BinaryCookieReader implements Iterable { for (int cnt = 0; cnt < pageCount; cnt++) { sizeArray[cnt] = dataStream.readInt(); } + + LOG.log(Level.INFO, "No cookies found in {0}", cookieFile.getName()); //NON-NLS } reader = new BinaryCookieReader(cookieFile, sizeArray); @@ -128,21 +130,24 @@ public final class BinaryCookieReader implements Iterable { * The cookiePageIterator iterates the binarycookie file by page. */ CookiePageIterator() { + if(pageSizeArray == null || pageSizeArray.length == 0) { + return; + } + try { dataStream = new DataInputStream(new FileInputStream(cookieFile)); // skip to the first page dataStream.skipBytes((2 * SIZEOF_INT_BYTES) + (pageSizeArray.length * SIZEOF_INT_BYTES)); } catch (IOException ex) { - LOG.log(Level.WARNING, "Error occured creating DataInputStream", new Object[]{cookieFile.getName(), ex}); //NON-NLS + + String errorMessage = String.format("An error occurred creating an input stream for %s", cookieFile.getName()); + LOG.log(Level.WARNING, errorMessage, ex); //NON-NLS closeStream(); // Just incase the error was from skip } } /** - * hasNext manages reading of each cookie page and creating the Cookie - * Page objects. If the currentIterator returns false from hadNext, read - * the next page and create and new instance of - * CookiePage\CookieIterator. + * Returns true if there are more cookies in the binarycookie file. 
* * @return True if there are more cookies */ @@ -152,7 +157,7 @@ public final class BinaryCookieReader implements Iterable { if (dataStream == null) { return false; } - + if (currentIterator == null || !currentIterator.hasNext()) { try { @@ -170,7 +175,8 @@ public final class BinaryCookieReader implements Iterable { pageIndex++; } catch (IOException ex) { closeStream(); - LOG.log(Level.WARNING, "A read error occured for file {0} page {1} {2}", new Object[]{cookieFile.getName(), pageIndex, ex}); //NON-NLS + String errorMessage = String.format("A read error occured for file %s (pageIndex = %d)", cookieFile.getName(), pageIndex); + LOG.log(Level.WARNING, errorMessage, ex); //NON-NLS return false; } } @@ -179,14 +185,14 @@ public final class BinaryCookieReader implements Iterable { } /** - * Get the next cookie from current the current CookieIterator. + * Get the next cookie from the current CookieIterator. * * @return The next cookie */ @Override public Cookie next() { // Just in case someone uses next without hasNext, this check will - // make sure there is more elements and that we iterate properly + // make sure there are more elements and that we iterate properly // through the pages. if (!hasNext()) { throw new NoSuchElementException(); @@ -203,7 +209,8 @@ public final class BinaryCookieReader implements Iterable { dataStream.close(); dataStream = null; } catch (IOException ex) { - LOG.log(Level.WARNING, "SafariCookieReader unable to close DataInputStream for file {0} {1}", new Object[]{cookieFile.getName(), ex}); //NON-NLS + String errorMessage = String.format("An error occurred trying to close stream for file %s", cookieFile.getName()); + LOG.log(Level.WARNING, errorMessage, ex); //NON-NLS } } } @@ -221,12 +228,12 @@ public final class BinaryCookieReader implements Iterable { * Setup the CookiePage object. Calidates that the page bytes are in the * correct format by checking for the header value of 0x0100. 
* - * @param page + * @param page byte array representing a cookie page * @throws IOException */ CookiePage(byte[] page) throws IOException { if (page == null || page.length == 0) { - throw new IllegalArgumentException("Invalid value for page passes to CookiePage constructor"); //NON-NLS + throw new IllegalArgumentException("Invalid value for page passed to CookiePage constructor"); //NON-NLS } pageBuffer = ByteBuffer.wrap(page); @@ -248,7 +255,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Returns in instance of a CookieIterator + * Returns an instance of a CookieIterator. * * @return CookieIterator */ @@ -265,7 +272,7 @@ public final class BinaryCookieReader implements Iterable { int index = 0; /** - * Checks to see if there are more cookies + * Checks to see if there are more cookies. * * @return True if there are more cookies, false if there are not */ @@ -279,7 +286,7 @@ public final class BinaryCookieReader implements Iterable { } /** - * Gets the next cookie from the page + * Gets the next cookie from the page. 
* * @return Next cookie */ @@ -307,8 +314,8 @@ public final class BinaryCookieReader implements Iterable { private final static int COOKIE_HEAD_SKIP = 16; - private final Double expirationDate; - private final Double creationDate; + private final double expirationDate; + private final double creationDate; private final String name; private final String url; @@ -322,7 +329,7 @@ public final class BinaryCookieReader implements Iterable { */ protected Cookie(byte[] cookieBytes) { if (cookieBytes == null || cookieBytes.length == 0) { - throw new IllegalArgumentException("Invalid value for cookieBytes passes to Cookie constructor"); //NON-NLS + throw new IllegalArgumentException("Invalid value for cookieBytes passed to Cookie constructor"); //NON-NLS } ByteBuffer byteBuffer = ByteBuffer.wrap(cookieBytes); @@ -350,20 +357,20 @@ public final class BinaryCookieReader implements Iterable { * Returns the expiration date of the cookie represented by this cookie * object. * - * @return + * @return Cookie expiration date in milliseconds with java epoch */ public final Long getExpirationDate() { - return expirationDate.longValue() + MAC_EPOC_FIX; + return ((long)expirationDate) + MAC_EPOC_FIX; } /** * Returns the creation date of the cookie represented by this cookie * object. * - * @return + * @return Cookie creation date in milliseconds with java epoch */ public final Long getCreationDate() { - return creationDate.longValue() + MAC_EPOC_FIX; + return ((long)creationDate) + MAC_EPOC_FIX; } /** @@ -403,14 +410,12 @@ public final class BinaryCookieReader implements Iterable { } /** - * Give an array a bytes and an offset in the array this function will - * copy the bytes from offset to the first null terminator into a new - * array. The bytes in the new array will be returned as a string not - * including the null terminator. + * Creates an ascii string from the bytes in byteArray starting at + * offset ending at the first null terminator found. 
* - * @param byteArray - * @param offset - * @return + * @param byteArray Array of bytes + * @param offset starting offset in the array + * @return String with bytes converted to ascii */ private String decodeString(byte[] byteArray, int offset) { byte[] stringBytes = new byte[byteArray.length - offset]; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index 0280bc8455..b4f04c79cd 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -22,13 +22,18 @@ */ package org.sleuthkit.autopsy.recentactivity; +import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; import java.util.logging.Level; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; @@ -41,7 +46,14 @@ import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; -import org.sleuthkit.datamodel.*; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskException; + abstract class Extract { @@ -410,7 +422,7 @@ abstract class Extract { * @return Newly created copy of the AbstractFile * @throws IOException */ - protected java.io.File 
createTemporaryFile(IngestJobContext context, AbstractFile file) throws IOException{ + protected File createTemporaryFile(IngestJobContext context, AbstractFile file) throws IOException{ Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath( getCurrentCase(), getName()), file.getName() + file.getId() + file.getNameExtension()); java.io.File tempFile = tempFilePath.toFile(); From fa60adcb261375a40ad49ab44f84af62906ed238 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Fri, 1 Mar 2019 11:40:38 -0500 Subject: [PATCH 67/80] Additional count parameter; handling edge case. --- .../contentviewer/Bundle.properties | 1 + .../DataContentViewerOtherCases.form | 4 +-- .../DataContentViewerOtherCases.java | 32 +++++++++++-------- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties index 2a9cd7b456..aa2b4b9297 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties @@ -6,3 +6,4 @@ DataContentViewerOtherCases.showCommonalityMenuItem.text=Show Frequency DataContentViewerOtherCases.earliestCaseDate.text=Earliest Case Date DataContentViewerOtherCases.earliestCaseLabel.toolTipText= DataContentViewerOtherCases.earliestCaseLabel.text=Central Repository Starting Date: +DataContentViewerOtherCases.foundInLabel.text= diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form index 9fdbfcfb67..53f6f6fc4b 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.form @@ -73,7 +73,7 @@ - + 
@@ -99,7 +99,7 @@ - + diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java index fe5df99a0f..823c1a3a05 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java @@ -345,28 +345,34 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi * Set the number of unique cases and data sources. */ @Messages({ - "DataContentViewerOtherCases.foundIn.text=Found in %d cases and %d data sources." + "DataContentViewerOtherCases.foundIn.text=Found %d instances in %d cases and %d data sources." }) private void setOccurrenceCounts() { DataContentViewerOtherCasesTableModel model = (DataContentViewerOtherCasesTableModel) otherCasesTable.getModel(); - // Note: Relying on the case name isn't a fool-proof way of determining - // a case to be unique. We should improve this in the future. int caseColumnIndex = DataContentViewerOtherCasesTableModel.TableColumns.CASE_NAME.ordinal(); + int deviceColumnIndex = DataContentViewerOtherCasesTableModel.TableColumns.DEVICE.ordinal(); + + /* + * We also need a unique set of data sources. We rely on device ID for + * this. To mitigate edge cases where a device ID could be duplicated + * in the same case (e.g. "report.xml"), we put the device ID and case + * name in a key-value pair. + * + * Note: Relying on the case name isn't a fool-proof way of determining + * a case to be unique. We should improve this in the future. 
+ */ Set cases = new HashSet<>(); + Map devices = new HashMap(); + for (int i=0; i < model.getRowCount(); i++) { String caseName = (String) model.getValueAt(i, caseColumnIndex); - cases.add(caseName); - } - - int deviceColumnIndex = DataContentViewerOtherCasesTableModel.TableColumns.DEVICE.ordinal(); - Set devices = new HashSet<>(); - for (int i=0; i < model.getRowCount(); i++) { String deviceId = (String) model.getValueAt(i, deviceColumnIndex); - devices.add(deviceId); + cases.add(caseName); + devices.put(deviceId, caseName); } - foundInLabel.setText(String.format(Bundle.DataContentViewerOtherCases_foundIn_text(), cases.size(), devices.size())); + foundInLabel.setText(String.format(Bundle.DataContentViewerOtherCases_foundIn_text(), model.getRowCount(), cases.size(), devices.size())); } /** @@ -891,7 +897,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi .addGap(0, 61, Short.MAX_VALUE) .addGroup(otherCasesPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(otherCasesPanelLayout.createSequentialGroup() - .addComponent(tableContainerPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 61, Short.MAX_VALUE) + .addComponent(tableContainerPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 53, Short.MAX_VALUE) .addGap(0, 0, 0))) ); @@ -903,7 +909,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(otherCasesPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 61, Short.MAX_VALUE) + .addComponent(otherCasesPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 53, Short.MAX_VALUE) ); }// //GEN-END:initComponents From 672e20bd6bbf20704a053c7fb4cf481e1bf8e6ca Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Fri, 1 Mar 2019 11:42:15 -0500 Subject: [PATCH 68/80] Typo fix. 
--- .../contentviewer/DataContentViewerOtherCases.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java index 823c1a3a05..db5f99dd96 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java @@ -354,10 +354,10 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi int deviceColumnIndex = DataContentViewerOtherCasesTableModel.TableColumns.DEVICE.ordinal(); /* - * We also need a unique set of data sources. We rely on device ID for - * this. To mitigate edge cases where a device ID could be duplicated - * in the same case (e.g. "report.xml"), we put the device ID and case - * name in a key-value pair. + * We need a unique set of data sources. We rely on device ID for this. + * To mitigate edge cases where a device ID could be duplicated in the + * same case (e.g. "report.xml"), we put the device ID and case name in + * a key-value pair. * * Note: Relying on the case name isn't a fool-proof way of determining * a case to be unique. We should improve this in the future. 
From ec3903cdb6a18c812e323cc8e885a43b9e6ee05b Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Fri, 1 Mar 2019 11:51:52 -0500 Subject: [PATCH 69/80] 4626 make connect(bool foreignKeys) an additional method instead of a replacement method --- .../datamodel/AbstractSqlEamDb.java | 119 +++++++++--------- .../datamodel/PostgresEamDb.java | 15 ++- .../datamodel/SqliteEamDb.java | 16 ++- 3 files changed, 88 insertions(+), 62 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java index fe09da90ac..504ae8040d 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java @@ -110,6 +110,11 @@ abstract class AbstractSqlEamDb implements EamDb { */ protected abstract Connection connect(boolean foreignKeys) throws EamDbException; + /** + * Setup and create a connection to the selected database implementation + */ + protected abstract Connection connect() throws EamDbException; + /** * Add a new name/value pair in the db_info table. * @@ -120,7 +125,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public void newDbInfo(String name, String value) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "INSERT INTO db_info (name, value) VALUES (?, ?) " @@ -141,7 +146,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public void addDataSourceObjectId(int rowId, long dataSourceObjectId) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "UPDATE data_sources SET datasource_obj_id=? 
WHERE id=?"; try { @@ -168,7 +173,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public String getDbInfo(String name) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -213,7 +218,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public void updateDbInfo(String name, String value) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "UPDATE db_info SET value=? WHERE name=?"; @@ -252,7 +257,7 @@ abstract class AbstractSqlEamDb implements EamDb { return cRCase; } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "INSERT INTO cases(case_uid, org_id, case_name, creation_date, case_number, " @@ -362,7 +367,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "UPDATE cases " @@ -447,7 +452,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @return The retrieved case */ private CorrelationCase getCaseByUUIDFromCr(String caseUUID) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); CorrelationCase eamCaseResult = null; PreparedStatement preparedStatement = null; @@ -508,7 +513,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @return The retrieved case */ private CorrelationCase getCaseByIdFromCr(int caseId) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); CorrelationCase eamCaseResult = null; PreparedStatement preparedStatement = null; @@ -548,7 +553,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getCases() throws EamDbException { - Connection conn = 
connect(true); + Connection conn = connect(); List cases = new ArrayList<>(); CorrelationCase eamCaseResult; @@ -626,7 +631,7 @@ abstract class AbstractSqlEamDb implements EamDb { return eamDataSource; } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; //The conflict clause exists in case multiple nodes are trying to add the data source because it did not exist at the same time @@ -733,7 +738,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private CorrelationDataSource getDataSourceFromCr(int correlationCaseId, Long dataSourceObjectId) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); CorrelationDataSource eamDataSourceResult = null; PreparedStatement preparedStatement = null; @@ -797,7 +802,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @return The data source */ private CorrelationDataSource getDataSourceByIdFromCr(CorrelationCase correlationCase, int dataSourceId) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); CorrelationDataSource eamDataSourceResult = null; PreparedStatement preparedStatement = null; @@ -834,7 +839,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getDataSources() throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); List dataSources = new ArrayList<>(); CorrelationDataSource eamDataSourceResult; @@ -904,7 +909,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation data source is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "UPDATE data_sources " @@ -940,7 +945,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public void updateDataSourceName(CorrelationDataSource eamDataSource, String newName) throws EamDbException { - Connection conn = 
connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -983,7 +988,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void addArtifactInstance(CorrelationAttributeInstance eamArtifact) throws EamDbException { checkAddArtifactInstanceNulls(eamArtifact); - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -1068,7 +1073,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(true); + Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -1133,7 +1138,7 @@ abstract class AbstractSqlEamDb implements EamDb { if (filePath == null) { throw new EamDbException("Correlation value is null"); } - Connection conn = connect(true); + Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -1197,7 +1202,7 @@ abstract class AbstractSqlEamDb implements EamDb { public Long getCountArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(true); + Connection conn = connect(); Long instanceCount = 0L; PreparedStatement preparedStatement = null; @@ -1251,7 +1256,7 @@ abstract class AbstractSqlEamDb implements EamDb { public Long getCountUniqueCaseDataSourceTuplesHavingTypeValue(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(true); + Connection conn = connect(); Long instanceCount = 0L; PreparedStatement preparedStatement = null; @@ -1284,7 +1289,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public Long getCountUniqueDataSources() 
throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); Long instanceCount = 0L; PreparedStatement preparedStatement = null; @@ -1321,7 +1326,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public Long getCountArtifactInstancesByCaseDataSource(CorrelationDataSource correlationDataSource) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); Long instanceCount = 0L; List artifactTypes = getDefinedCorrelationTypes(); @@ -1394,7 +1399,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void commitAttributeInstancesBulk() throws EamDbException { List artifactTypes = getDefinedCorrelationTypes(); - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement bulkPs = null; try { @@ -1501,7 +1506,7 @@ abstract class AbstractSqlEamDb implements EamDb { return; } - Connection conn = connect(true); + Connection conn = connect(); int counter = 0; PreparedStatement bulkPs = null; @@ -1589,7 +1594,7 @@ abstract class AbstractSqlEamDb implements EamDb { if (eamArtifact.getCorrelationDataSource() == null) { throw new EamDbException("Correlation data source is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedQuery = null; String tableName = EamDbUtil.correlationTypeToInstanceTableName(eamArtifact.getCorrelationType()); String sqlUpdate @@ -1639,7 +1644,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation case is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -1707,7 +1712,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation file path is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -1776,7 +1781,7 @@ abstract class 
AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation data source is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedUpdate = null; PreparedStatement preparedQuery = null; @@ -1858,7 +1863,7 @@ abstract class AbstractSqlEamDb implements EamDb { public List getArtifactInstancesKnownBad(CorrelationAttributeInstance.Type aType, String value) throws EamDbException, CorrelationAttributeNormalizationException { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(true); + Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -1922,7 +1927,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation type is null"); } - Connection conn = connect(true); + Connection conn = connect(); List artifactInstances = new ArrayList<>(); @@ -1981,7 +1986,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(true); + Connection conn = connect(); Long badInstances = 0L; PreparedStatement preparedStatement = null; @@ -2028,7 +2033,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizedValue = CorrelationAttributeNormalizer.normalize(aType, value); - Connection conn = connect(true); + Connection conn = connect(); Collection caseNames = new LinkedHashSet<>(); @@ -2087,7 +2092,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private void deleteReferenceSetEntry(int referenceSetID) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "DELETE FROM reference_sets WHERE id=?"; @@ -2113,7 +2118,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private void deleteReferenceSetEntries(int referenceSetID) throws EamDbException { - 
Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "DELETE FROM %s WHERE reference_set_id=?"; @@ -2186,7 +2191,7 @@ abstract class AbstractSqlEamDb implements EamDb { String normalizeValued = CorrelationAttributeNormalizer.normalize(this.getCorrelationTypeById(correlationTypeID), value); - Connection conn = connect(true); + Connection conn = connect(); Long matchingInstances = 0L; PreparedStatement preparedStatement = null; @@ -2232,7 +2237,7 @@ abstract class AbstractSqlEamDb implements EamDb { return false; } - Connection conn = connect(true); + Connection conn = connect(); Long badInstances = 0L; PreparedStatement preparedStatement = null; @@ -2275,7 +2280,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Callback interface is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; ResultSet resultSet = null; String tableName = EamDbUtil.correlationTypeToInstanceTableName(type); @@ -2319,7 +2324,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Where clause is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; ResultSet resultSet = null; String tableName = EamDbUtil.correlationTypeToInstanceTableName(type); @@ -2350,7 +2355,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("EamOrganization already has an ID"); } - Connection conn = connect(true); + Connection conn = connect(); ResultSet generatedKeys = null; PreparedStatement preparedStatement = null; String sql = "INSERT INTO organizations(org_name, poc_name, poc_email, poc_phone) VALUES (?, ?, ?, ?) 
" @@ -2389,7 +2394,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getOrganizations() throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); List orgs = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -2424,7 +2429,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public EamOrganization getOrganizationByID(int orgID) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; ResultSet resultSet = null; @@ -2492,7 +2497,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void updateOrganization(EamOrganization updatedOrganization) throws EamDbException { testArgument(updatedOrganization); - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "UPDATE organizations SET org_name = ?, poc_name = ?, poc_email = ?, poc_phone = ? WHERE id = ?"; try { @@ -2515,7 +2520,7 @@ abstract class AbstractSqlEamDb implements EamDb { public void deleteOrganization(EamOrganization organizationToDelete) throws EamDbException { testArgument(organizationToDelete); - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement checkIfUsedStatement = null; ResultSet resultSet = null; String checkIfUsedSql = "SELECT (select count(*) FROM cases WHERE org_id=?) 
+ (select count(*) FROM reference_sets WHERE org_id=?)"; @@ -2566,7 +2571,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Type on the EamGlobalSet is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement1 = null; PreparedStatement preparedStatement2 = null; @@ -2618,7 +2623,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public EamGlobalSet getReferenceSetByID(int referenceSetID) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement1 = null; ResultSet resultSet = null; @@ -2660,7 +2665,7 @@ abstract class AbstractSqlEamDb implements EamDb { } List results = new ArrayList<>(); - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement1 = null; ResultSet resultSet = null; @@ -2701,7 +2706,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Correlation type is null"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; @@ -2737,7 +2742,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public boolean referenceSetExists(String referenceSetName, String version) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement1 = null; ResultSet resultSet = null; @@ -2774,7 +2779,7 @@ abstract class AbstractSqlEamDb implements EamDb { throw new EamDbException("Null set of EamGlobalFileInstance"); } - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement bulkPs = null; try { @@ -2827,7 +2832,7 @@ abstract class AbstractSqlEamDb implements EamDb { public List getReferenceInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String aValue) throws EamDbException, CorrelationAttributeNormalizationException { String normalizeValued = 
CorrelationAttributeNormalizer.normalize(aType, aValue); - Connection conn = connect(true); + Connection conn = connect(); List globalFileInstances = new ArrayList<>(); PreparedStatement preparedStatement1 = null; @@ -2888,7 +2893,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ public int newCorrelationTypeNotKnownId(CorrelationAttributeInstance.Type newType) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; PreparedStatement preparedStatementQuery = null; @@ -2941,7 +2946,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private int newCorrelationTypeKnownId(CorrelationAttributeInstance.Type newType) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; PreparedStatement preparedStatementQuery = null; @@ -2987,7 +2992,7 @@ abstract class AbstractSqlEamDb implements EamDb { @Override public List getDefinedCorrelationTypes() throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); List aTypes = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -3022,7 +3027,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getEnabledCorrelationTypes() throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); List aTypes = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -3057,7 +3062,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public List getSupportedCorrelationTypes() throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); List aTypes = new ArrayList<>(); PreparedStatement preparedStatement = null; @@ -3090,7 +3095,7 @@ abstract class AbstractSqlEamDb implements EamDb { */ @Override public void updateCorrelationType(CorrelationAttributeInstance.Type aType) throws 
EamDbException { - Connection conn = connect(true); + Connection conn = connect(); PreparedStatement preparedStatement = null; String sql = "UPDATE correlation_types SET display_name=?, db_table_name=?, supported=?, enabled=? WHERE id=?"; @@ -3144,7 +3149,7 @@ abstract class AbstractSqlEamDb implements EamDb { * @throws EamDbException */ private CorrelationAttributeInstance.Type getCorrelationTypeByIdFromCr(int typeId) throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); CorrelationAttributeInstance.Type aType; PreparedStatement preparedStatement = null; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java index 4a13e1e6b6..89382289f0 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDb.java @@ -104,7 +104,7 @@ final class PostgresEamDb extends AbstractSqlEamDb { @Override public void reset() throws EamDbException { - Connection conn = connect(true); + Connection conn = connect(); try { Statement dropContent = conn.createStatement(); @@ -173,6 +173,18 @@ final class PostgresEamDb extends AbstractSqlEamDb { @Override protected Connection connect(boolean foreignKeys) throws EamDbException { //foreignKeys boolean is ignored for postgres + return connect(); + } + + /** + * Lazily setup Singleton connection on first request. + * + * @return A connection from the connection pool. 
+ * + * @throws EamDbException + */ + @Override + protected Connection connect() throws EamDbException { synchronized (this) { if (!EamDb.isEnabled()) { throw new EamDbException("Central Repository module is not enabled"); // NON-NLS @@ -182,7 +194,6 @@ final class PostgresEamDb extends AbstractSqlEamDb { setupConnectionPool(); } } - try { return connectionPool.getConnection(); } catch (SQLException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java index 020bd69b96..8a6c4467c1 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDb.java @@ -114,7 +114,7 @@ final class SqliteEamDb extends AbstractSqlEamDb { try { acquireExclusiveLock(); - Connection conn = connect(true); + Connection conn = connect(); try { @@ -191,11 +191,9 @@ final class SqliteEamDb extends AbstractSqlEamDb { if (!EamDb.isEnabled()) { throw new EamDbException("Central Repository module is not enabled"); // NON-NLS } - if (connectionPool == null) { setupConnectionPool(foreignKeys); } - try { return connectionPool.getConnection(); } catch (SQLException ex) { @@ -204,6 +202,18 @@ final class SqliteEamDb extends AbstractSqlEamDb { } } + /** + * Lazily setup Singleton connection on first request with foreign keys enforced. + * + * @return A connection from the connection pool. + * + * @throws EamDbException + */ + @Override + protected Connection connect() throws EamDbException { + return connect(true); + } + @Override protected String getConflictClause() { // For sqlite, our conflict clause is part of the table schema From 18beac06e845fd561dc261cc21c9d3a20a118757 Mon Sep 17 00:00:00 2001 From: Raman Date: Fri, 1 Mar 2019 14:45:57 -0500 Subject: [PATCH 70/80] 1204: Show TSK_DOWNLOAD_SOURCE artifact in MetaData viewer Also addressed review comments on previous commit. 
--- .../contentviewers/Bundle.properties-MERGED | 1 + .../autopsy/contentviewers/Metadata.java | 19 +++++++ .../autopsy/datamodel/ExtractedContent.java | 4 +- .../keywordsearch/Bundle.properties-MERGED | 2 +- .../recentactivity/Bundle.properties-MERGED | 4 +- .../recentactivity/ChromeCacheExtractor.java | 53 ++++++++++--------- .../recentactivity/RAImageIngestModule.java | 5 +- 7 files changed, 55 insertions(+), 33 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED index 74b500780f..6c3d0b1d5c 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED @@ -53,6 +53,7 @@ Metadata.nodeText.truncated=(results truncated) Metadata.nodeText.unknown=Unknown Metadata.tableRowTitle.acquisitionDetails=Acquisition Details Metadata.tableRowTitle.deviceId=Device ID +Metadata.tableRowTitle.downloadSource=Downloaded From Metadata.tableRowTitle.imageType=Type Metadata.tableRowTitle.mimeType=MIME Type Metadata.tableRowTitle.name=Name diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java b/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java index 4adae57b8d..126299f120 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.contentviewers; import java.awt.Component; +import java.util.List; import java.util.logging.Level; import org.apache.commons.lang3.StringUtils; import org.openide.nodes.Node; @@ -29,6 +30,10 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import 
org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.FsContent; @@ -138,6 +143,7 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer { "Metadata.tableRowTitle.timezone=Time Zone", "Metadata.tableRowTitle.deviceId=Device ID", "Metadata.tableRowTitle.acquisitionDetails=Acquisition Details", + "Metadata.tableRowTitle.downloadSource=Downloaded From", "Metadata.nodeText.unknown=Unknown", "Metadata.nodeText.none=None"}) @Override @@ -184,6 +190,19 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer { addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), file.getLocalAbsPath()); } + try { + List sourceArtifacts = file.getArtifacts(ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE); + if (!sourceArtifacts.isEmpty()) { + BlackboardArtifact artifact = sourceArtifacts.get(0); + BlackboardAttribute urlAttr = artifact.getAttribute(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_URL)); + if (urlAttr != null) { + addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.downloadSource"), urlAttr.getValueString()); + } + } + } catch (TskCoreException ex) { + sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage()); + } + endTable(sb); /* diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java index 7037ad4584..4b3694b35f 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java @@ -50,7 +50,7 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHS import static 
org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE; +import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskException; @@ -237,7 +237,7 @@ public class ExtractedContent implements AutopsyVisitableItem { doNotShow.add(new BlackboardArtifact.Type(TSK_INTERESTING_ARTIFACT_HIT)); doNotShow.add(new BlackboardArtifact.Type(TSK_ACCOUNT)); doNotShow.add(new BlackboardArtifact.Type(TSK_DATA_SOURCE_USAGE)); - doNotShow.add(new BlackboardArtifact.Type(TSK_SOURCE) ); + doNotShow.add(new BlackboardArtifact.Type(TSK_DOWNLOAD_SOURCE) ); } private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index f4febc1d7c..61c0d2d2c7 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -34,7 +34,7 @@ KeywordSearchIngestModule.startupMessage.failedToGetIndexSchema=Failed to get sc KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found. 
KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\n\The module indexes files found in the disk image at ingest time.\n\It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index b2a5a02487..1e5595024a 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -38,7 +38,7 @@ ExtractOs.windowsVolume.label=OS Drive (Windows) ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog) ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog) OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n\The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\n\The module currently supports Windows only disk images.\n\The plugin is also fully functional when deployed on Windows version of Autopsy. +OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\nThe module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. 
OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module Chrome.moduleName=Chrome @@ -136,7 +136,7 @@ SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}. SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE -SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\n\count: {2}\nSplit Tokens: \n{3} +SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\ncount: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity UsbDeviceIdMapper.parseAndLookup.text=Product: {0} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 56e18dfef5..a3a6bf7522 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -28,6 +28,7 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.nio.charset.Charset; +import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; @@ -74,8 +75,8 @@ import org.sleuthkit.datamodel.TskException; */ final class ChromeCacheExtractor { - private final static String DEFAULT_CACHE_STR = "default/cache"; - private final static String BROTLI_MIMETYPE ="application/x-brotli"; + private final static String DEFAULT_CACHE_STR = "default/cache"; //NON-NLS + private final static String BROTLI_MIMETYPE ="application/x-brotli"; //NON-NLS private final static long UINT32_MASK = 0xFFFFFFFFl; @@ -84,7 +85,7 @@ final class ChromeCacheExtractor { private final static Logger logger = 
Logger.getLogger(ChromeCacheExtractor.class.getName()); - private static final String VERSION_NUMBER = "1.0.0"; + private static final String VERSION_NUMBER = "1.0.0"; //NON-NLS private final String moduleName; private String absOutputFolderName; @@ -128,8 +129,8 @@ final class ChromeCacheExtractor { @NbBundle.Messages({ "ChromeCacheExtractor.moduleName=ChromeCacheExtractor" }) - ChromeCacheExtractor(Content dataSource, IngestJobContext context ) { - moduleName = NbBundle.getMessage(ChromeCacheExtractor.class, "ChromeCacheExtractor.moduleName"); + ChromeCacheExtractor(Content dataSource, IngestJobContext context ) { + moduleName = Bundle.ChromeCacheExtractor_moduleName(); this.dataSource = dataSource; this.context = context; } @@ -155,7 +156,7 @@ final class ChromeCacheExtractor { dir.mkdirs(); } } catch (NoCurrentCaseException ex) { - String msg = "Failed to get current case."; + String msg = "Failed to get current case."; //NON-NLS throw new IngestModuleException(msg, ex); } } @@ -193,17 +194,17 @@ final class ChromeCacheExtractor { void cleanup () { for (Entry entry : this.filesTable.entrySet()) { - String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + File.separator + entry.getKey(); + Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(currentCase, moduleName), entry.getKey() ); try { entry.getValue().getFileCopy().getChannel().close(); entry.getValue().getFileCopy().close(); - File tmpFile = new File(tempFilePathname); + File tmpFile = tempFilePath.toFile(); if (!tmpFile.delete()) { tmpFile.deleteOnExit(); } } catch (IOException ex) { - logger.log(Level.WARNING, String.format("Failed to delete cache file copy %s", tempFilePathname), ex); + logger.log(Level.WARNING, String.format("Failed to delete cache file copy %s", tempFilePath.toString()), ex); //NON-NLS } } } @@ -211,7 +212,7 @@ final class ChromeCacheExtractor { /** * Returns the location of output folder for this module * - * @return + * @return absolute 
location of output folder */ private String getAbsOutputFolderName() { return absOutputFolderName; @@ -220,7 +221,7 @@ final class ChromeCacheExtractor { /** * Returns the relative location of output folder for this module * - * @return + * @return relative location of output folder */ private String getRelOutputFolderName() { return relOutputFolderName; @@ -237,7 +238,7 @@ final class ChromeCacheExtractor { try { moduleInit(); } catch (IngestModuleException ex) { - String msg = "Failed to initialize ChromeCacheExtractor."; + String msg = "Failed to initialize ChromeCacheExtractor."; //NON-NLS logger.log(Level.SEVERE, msg, ex); return; } @@ -245,7 +246,7 @@ final class ChromeCacheExtractor { // Find all possible caches List indexFiles; try { - indexFiles = findCacheFiles("index"); + indexFiles = findCacheFiles("index"); //NON-NLS // Get each of the caches for (AbstractFile indexFile: indexFiles) { @@ -253,7 +254,7 @@ final class ChromeCacheExtractor { } } catch (TskCoreException ex) { - String msg = "Failed to find cache index files"; + String msg = "Failed to find cache index files"; //NON-NLS logger.log(Level.SEVERE, msg, ex); } } @@ -285,7 +286,7 @@ final class ChromeCacheExtractor { } } catch (TskCoreException | IngestModuleException ex) { - String msg = "Failed to find cache files in path " + cachePath; + String msg = "Failed to find cache files in path " + cachePath; //NON-NLS logger.log(Level.SEVERE, msg, ex); return; } @@ -323,7 +324,7 @@ final class ChromeCacheExtractor { context.addFilesToJob(derivedFiles); - services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_SOURCE, !sourceArtifacts.isEmpty() ? sourceArtifacts : null)); + services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE, !sourceArtifacts.isEmpty() ? 
sourceArtifacts : null)); services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CACHE, !webCacheArtifacts.isEmpty() ? webCacheArtifacts : null)); cleanup(); @@ -350,7 +351,7 @@ final class ChromeCacheExtractor { Optional cacheEntryFile = this.getCacheFileCopy(cacheEntryFileName, cachePath); if (!cacheEntryFile.isPresent()) { - String msg = String.format("Failed to get cache entry at address %s", cacheEntryAddress); + String msg = String.format("Failed to get cache entry at address %s", cacheEntryAddress); //NON-NLS throw new IngestModuleException(msg); } @@ -396,7 +397,7 @@ final class ChromeCacheExtractor { if (dataFile.isPresent()) { if (data.isInExternalFile() ) { try { - BlackboardArtifact sourceArtifact = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_SOURCE); + BlackboardArtifact sourceArtifact = dataFile.get().newArtifact(ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE); if (sourceArtifact != null) { sourceArtifact.addAttributes(sourceArtifactAttributes); sourceArtifacts.add(sourceArtifact); @@ -409,9 +410,9 @@ final class ChromeCacheExtractor { // Add path of f_* file as attribute webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, moduleName, - dataFile.get().getUniquePath())); //NON-NLS + dataFile.get().getUniquePath())); - long pathID = Util.findID(dataSource, dataFile.get().getUniquePath()); //NON-NLS + long pathID = Util.findID(dataSource, dataFile.get().getUniquePath()); if (pathID != -1) { webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, moduleName, pathID)); @@ -444,7 +445,7 @@ final class ChromeCacheExtractor { "", TskData.EncodingType.NONE); - BlackboardArtifact sourceArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_SOURCE); + BlackboardArtifact sourceArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE); if (sourceArtifact != null) { sourceArtifact.addAttributes(sourceArtifactAttributes); 
sourceArtifacts.add(sourceArtifact); @@ -457,8 +458,8 @@ final class ChromeCacheExtractor { // Add path of derived file as attribute webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, moduleName, - derivedFile.getUniquePath())); //NON-NLS - long pathID = Util.findID(dataSource, derivedFile.getUniquePath()); //NON-NLS + derivedFile.getUniquePath())); + long pathID = Util.findID(dataSource, derivedFile.getUniquePath()); if (pathID != -1) { webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, moduleName, pathID)); @@ -571,11 +572,11 @@ final class ChromeCacheExtractor { return Optional.of(cacheFileCopy); } catch (ReadContentInputStream.ReadContentInputStreamException ex) { - String msg = String.format("Error reading Chrome cache file '%s' (id=%d).", - cacheFile.getName(), cacheFile.getId()); + String msg = String.format("Error reading Chrome cache file '%s' (id=%d).", //NON-NLS + cacheFile.getName(), cacheFile.getId()); throw new IngestModuleException(msg, ex); } catch (IOException ex) { - String msg = String.format("Error writing temp Chrome cache file '%s' (id=%d).", + String msg = String.format("Error writing temp Chrome cache file '%s' (id=%d).", //NON-NLS cacheFile.getName(), cacheFile.getId()); throw new IngestModuleException(msg, ex); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 5b15a29805..d909c77fa7 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -23,6 +23,7 @@ package org.sleuthkit.autopsy.recentactivity; import java.io.File; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; @@ -235,7 +236,7 @@ public final class 
RAImageIngestModule implements DataSourceIngestModule { * @return the relative path of the module output folder */ static String getRelModuleOutputPath() throws NoCurrentCaseException { - return Case.getCurrentCaseThrows().getModuleOutputDirectoryRelativePath() + File.separator - + "RecentActivity"; + return Paths.get(Case.getCurrentCaseThrows().getModuleOutputDirectoryRelativePath(), + "RecentActivity").normalize().toString() ; //NON-NLS } } From 4065b9996723f73c74d3d51f2be024e61342c6ab Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 4 Mar 2019 10:22:42 -0500 Subject: [PATCH 71/80] Cleaned up the download code to put it on hold, fixed code to make sure we clean up the temp files properly and cleaned up a few javadocs --- .../autopsy/recentactivity/ExtractEdge.java | 71 ++++++++++--------- 1 file changed, 39 insertions(+), 32 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 2264a83850..2484d41b4f 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -95,11 +95,13 @@ final class ExtractEdge extends Extract { private static final String EDGE_FAVORITE_FILE_NAME = "Favorites.csv"; //NON-NLS private static final String EDGE_OUTPUT_FILE_NAME = "Output.txt"; //NON-NLS private static final String EDGE_ERROR_FILE_NAME = "File.txt"; //NON-NLS + private static final String EDGE_WEBCACHE_FOLDER_NAME = "WebCache"; //NON-NLS + private static final String EDGE_SPARTAN_FOLDER_NAME = "MicrosoftEdge"; //NON-NLS private static final String ESE_TOOL_FOLDER = "ESEDatabaseView"; //NON-NLS private static final String EDGE_RESULT_FOLDER_NAME = "results"; //NON-NLS - private static final SimpleDateFormat DATE_FORMATTER = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); + private static final SimpleDateFormat DATE_FORMATTER = new 
SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); //NON-NLS @Messages({ "ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer", @@ -174,7 +176,7 @@ final class ExtractEdge extends Extract { try { this.processSpartanDbFile(esedumper, spartanFiles); } catch (IOException | TskCoreException ex) { - this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); + this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_spartanFail()); LOG.log(Level.SEVERE, "Error returned from processSpartanDbFile", ex); // NON-NLS } } @@ -228,13 +230,15 @@ final class ExtractEdge extends Extract { this.getCookies(webCacheFile, resultsDir); -// if (context.dataSourceIngestIsCancelled()) { -// return; -// } -// Putting downloads on hold -// this.getDownload(webCacheFile, resultsDir); } finally { tempWebCacheFile.delete(); + + // Emppty the result dir + File[] resultFiles = resultsDir.listFiles(); + for (File file : resultFiles) { + file.delete(); + } + resultsDir.delete(); } } @@ -283,6 +287,13 @@ final class ExtractEdge extends Extract { } finally { tempSpartanFile.delete(); + + // Empty the result dir + File[] resultFiles = resultsDir.listFiles(); + for (File file : resultFiles) { + file.delete(); + } + resultsDir.delete(); } } @@ -393,7 +404,7 @@ final class ExtractEdge extends Extract { } /** - * Queries for cookie files and adds artifacts + * Queries for cookie files and adds artifacts. * * @param origFile Original case file * @param resultDir Output directory of ESEDatabaseViewer @@ -445,7 +456,9 @@ final class ExtractEdge extends Extract { } /** - * Queries for download files and adds artifacts + * Queries for download files and adds artifacts. + * + * Leaving for future use. 
* * @param origFile Original case file * @param resultDir Output directory of ESEDatabaseViewer @@ -524,7 +537,7 @@ final class ExtractEdge extends Extract { private List fetchWebCacheDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME, "WebCache"); //NON-NLS + return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME, EDGE_WEBCACHE_FOLDER_NAME); //NON-NLS } /** @@ -536,7 +549,7 @@ final class ExtractEdge extends Extract { private List fetchSpartanDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME, "MicrosoftEdge"); //NON-NLS + return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME, EDGE_SPARTAN_FOLDER_NAME); //NON-NLS } /** @@ -622,7 +635,7 @@ final class ExtractEdge extends Extract { * @throws TskCoreException */ private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { - String[] lineSplit = line.split(","); + String[] lineSplit = line.split(","); // NON-NLS String accessTime = lineSplit[headers.indexOf(EDGE_HEAD_LASTMOD)].trim(); Long ftime = null; @@ -636,9 +649,10 @@ final class ExtractEdge extends Extract { String domain = lineSplit[headers.indexOf(EDGE_HEAD_RDOMAIN)].trim(); String name = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_NAME)].trim()); String value = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_VALUE)].trim()); + String url = flipDomain(domain); BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE); - bbart.addAttributes(createCookieAttributes(null, ftime, name, value, this.getName(), flipDomain(domain))); + bbart.addAttributes(createCookieAttributes(url, ftime, name, value, this.getName(), 
NetworkUtils.extractDomain(url))); return bbart; } @@ -656,18 +670,12 @@ final class ExtractEdge extends Extract { * @throws TskCoreException */ private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { - -// String[] lineSplit = line.split(","); -// -// String url = lineSplit[headers.indexOf(EDGE_HEAD_URL)]; -// -// String rheader = lineSplit[headers.indexOf(EDGE_HEAD_RESPONSEHEAD)]; -// -// String decodedheader = this.hexToASCII(rheader); -// BlackboardArtifact bbart = origFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD); -// bbart.addAttributes(createDownloadAttributes(decodedheader, "Test2", null, "microsoft.com", this.getName())); -// return bbart; - return null; + BlackboardArtifact bbart = null; + + String[] lineSplit = line.split(","); // NON-NLS + String rheader = lineSplit[headers.indexOf(EDGE_HEAD_RESPONSEHEAD)]; + + return bbart; } /** @@ -687,7 +695,7 @@ final class ExtractEdge extends Extract { String[] lineSplit = line.split(IGNORE_COMMA_IN_QUOTES_REGEX, -1); String url = lineSplit[headers.indexOf(EDGE_HEAD_URL)]; - String title = lineSplit[headers.indexOf(EDGE_HEAD_TITLE)].replace("\"", ""); + String title = lineSplit[headers.indexOf(EDGE_HEAD_TITLE)].replace("\"", ""); // NON-NLS if (url.isEmpty()) { return null; @@ -801,15 +809,14 @@ final class ExtractEdge extends Extract { * @param programName Name of the module creating the attribute * @return A collection of attributed of a downloaded file */ - private Collection createDownloadAttributes(String path, String url, Long accessTime, String domain, String programName) { + private Collection createDownloadAttributes(String path, Long pathID, String url, Long accessTime, String domain, String programName) { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, RecentActivityExtracterModuleFactory.getModuleName(), (path != null) ? 
path : "")); - long pathID = Util.findID(dataSource, path); - if (pathID != -1) { + if (pathID != null && pathID != -1) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID, RecentActivityExtracterModuleFactory.getModuleName(), pathID)); @@ -879,7 +886,7 @@ final class ExtractEdge extends Extract { * @return "decoded" string or null if a non-hex value was found */ private String hexToChar(String hexString) { - String[] hexValues = hexString.split(" "); + String[] hexValues = hexString.split(" "); // NON-NLS StringBuilder output = new StringBuilder(); for (String str : hexValues) { @@ -912,7 +919,7 @@ final class ExtractEdge extends Extract { return null; } - String[] tokens = domain.split("\\."); + String[] tokens = domain.split("\\."); // NON-NLS if (tokens.length < 2 || tokens.length > 3) { return domain; // don't know what to do, just send it back as is @@ -1004,7 +1011,7 @@ final class ExtractEdge extends Extract { nameIdx = headers.indexOf(EDGE_HEAD_NAME); idIdx = headers.indexOf(EDGE_HEAD_CONTAINER_ID); } else { - String[] row = line.split(","); + String[] row = line.split(","); // NON-NLS String name = row[nameIdx]; String id = row[idIdx]; From ff5195eeffa3a2dfc8a992be5b5174313813eb31 Mon Sep 17 00:00:00 2001 From: Raman Date: Mon, 4 Mar 2019 10:42:43 -0500 Subject: [PATCH 72/80] Close temp file copy in case of error. 
--- .../recentactivity/ChromeCacheExtractor.java | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index a3a6bf7522..1f7eb0580e 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -552,12 +552,13 @@ final class ChromeCacheExtractor { } AbstractFile cacheFile = cacheFileOptional.get(); + RandomAccessFile randomAccessFile = null; String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + cachePath + cacheFile.getName(); //NON-NLS try { File newFile = new File(tempFilePathname); ContentUtils.writeToFile(cacheFile, newFile, context::dataSourceIngestIsCancelled); - RandomAccessFile randomAccessFile = new RandomAccessFile(tempFilePathname, "r"); + randomAccessFile = new RandomAccessFile(tempFilePathname, "r"); FileChannel roChannel = randomAccessFile.getChannel(); ByteBuffer cacheFileROBuf = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, (int) roChannel.size()); @@ -571,15 +572,20 @@ final class ChromeCacheExtractor { return Optional.of(cacheFileCopy); } - catch (ReadContentInputStream.ReadContentInputStreamException ex) { - String msg = String.format("Error reading Chrome cache file '%s' (id=%d).", //NON-NLS - cacheFile.getName(), cacheFile.getId()); + catch (IOException ex) { + + try { + if (randomAccessFile != null) { + randomAccessFile.close(); + } + } + catch (IOException ex2) { + logger.log(Level.SEVERE, "Error while trying to close temp file after exception.", ex2); //NON-NLS + } + String msg = String.format("Error reading/copying Chrome cache file '%s' (id=%d).", //NON-NLS + cacheFile.getName(), cacheFile.getId()); throw new IngestModuleException(msg, ex); - } catch (IOException ex) { - String msg = 
String.format("Error writing temp Chrome cache file '%s' (id=%d).", //NON-NLS - cacheFile.getName(), cacheFile.getId()); - throw new IngestModuleException(msg, ex); - } + } } /** From 67ebffac6ca55a2b37deac7897c04f4975a77e16 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 4 Mar 2019 12:50:01 -0500 Subject: [PATCH 73/80] Removed the plist jar from recentActivity, it should be using the one from corelib now. --- RecentActivity/ivy.xml | 2 +- RecentActivity/nbproject/project.properties | 1 - RecentActivity/nbproject/project.xml | 4 ---- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/RecentActivity/ivy.xml b/RecentActivity/ivy.xml index ca95f14a98..26621b0320 100644 --- a/RecentActivity/ivy.xml +++ b/RecentActivity/ivy.xml @@ -7,6 +7,6 @@ - + diff --git a/RecentActivity/nbproject/project.properties b/RecentActivity/nbproject/project.properties index 4071f4a54e..9736070e53 100644 --- a/RecentActivity/nbproject/project.properties +++ b/RecentActivity/nbproject/project.properties @@ -1,4 +1,3 @@ -file.reference.dd-plist-1.20.jar=release/modules/ext/dd-plist-1.20.jar javac.source=1.8 javac.compilerargs=-Xlint -Xlint:-serial license.file=../LICENSE-2.0.txt diff --git a/RecentActivity/nbproject/project.xml b/RecentActivity/nbproject/project.xml index 9584170602..87619a8356 100644 --- a/RecentActivity/nbproject/project.xml +++ b/RecentActivity/nbproject/project.xml @@ -74,10 +74,6 @@
- - ext/dd-plist-1.20.jar - release/modules/ext/dd-plist-1.20.jar - From 151e4218dadb37f1a8a4289de341f989ae9f81ff Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 4 Mar 2019 15:57:14 -0500 Subject: [PATCH 74/80] Modify Extract and subclasses to take a progress bar in process function --- .../autopsy/recentactivity/Chrome.java | 26 ++++++++++++++++++- .../DataSourceUsageAnalyzer.java | 7 +++-- .../autopsy/recentactivity/Extract.java | 3 ++- .../autopsy/recentactivity/ExtractEdge.java | 17 +++++++++--- .../autopsy/recentactivity/ExtractIE.java | 20 +++++++++++++- .../autopsy/recentactivity/ExtractOs.java | 7 +++-- .../recentactivity/ExtractRegistry.java | 8 ++++-- .../autopsy/recentactivity/ExtractSafari.java | 15 ++++++++--- .../autopsy/recentactivity/Firefox.java | 25 +++++++++++++++++- .../recentactivity/RAImageIngestModule.java | 2 +- .../recentactivity/RecentDocumentsByLnk.java | 9 ++++++- .../SearchEngineURLQueryAnalyzer.java | 8 ++++-- 12 files changed, 126 insertions(+), 21 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 211372d6d0..a30cf2c59a 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -37,11 +37,13 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; +import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import 
org.sleuthkit.datamodel.AbstractFile; @@ -81,21 +83,43 @@ class Chrome extends Extract { private final Logger logger = Logger.getLogger(this.getClass().getName()); private Content dataSource; private IngestJobContext context; + + @Messages({ + "Progress_Message_Chrome_History=Chrome History", + "Progress_Message_Chrome_Bookmarks=Chrome Bookmarks", + "Progress_Message_Chrome_Cookies=Chrome Cookies", + "Progress_Message_Chrome_Downloads=Chrome Downloads", + "Progress_Message_Chrome_FormHistory=Chrome Form History", + "Progress_Message_Chrome_AutoFill=Chrome Auto Fill", + "Progress_Message_Chrome_Logins=Chrome Logins", + }) Chrome() { moduleName = NbBundle.getMessage(Chrome.class, "Chrome.moduleName"); } @Override - public void process(Content dataSource, IngestJobContext context) { + public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; this.context = context; dataFound = false; + + progressBar.progress(Bundle.Progress_Message_Chrome_History()); this.getHistory(); + + progressBar.progress(Bundle.Progress_Message_Chrome_Bookmarks()); this.getBookmark(); + + progressBar.progress(Bundle.Progress_Message_Chrome_Cookies()); this.getCookie(); + + progressBar.progress(Bundle.Progress_Message_Chrome_Logins()); this.getLogins(); + + progressBar.progress(Bundle.Progress_Message_Chrome_AutoFill()); this.getAutofill(); + + progressBar.progress(Bundle.Progress_Message_Chrome_Downloads()); this.getDownload(); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java index 9c841442a3..8b40633d2b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java @@ -26,6 +26,7 @@ import org.apache.commons.io.FilenameUtils; import 
org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -46,12 +47,14 @@ class DataSourceUsageAnalyzer extends Extract { @Messages({ "# {0} - OS name", - "DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})" + "DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})", + "Progress_Message_Analyze_Usage=Data Sources Usage Analysis", }) @Override - void process(Content dataSource, IngestJobContext context) { + void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; try { + progressBar.progress(Bundle.Progress_Message_Analyze_Usage()); createDataSourceUsageArtifacts(); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Failed to check if datasource contained a volume with operating system specific files", ex); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index b4f04c79cd..3ffbad514b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -44,6 +44,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect; import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.datamodel.AbstractFile; @@ -85,7 +86,7 @@ abstract class Extract { void configExtractor() 
throws IngestModuleException { } - abstract void process(Content dataSource, IngestJobContext context); + abstract void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar); void complete() { } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index ff793703b1..a446b6450a 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -43,6 +43,7 @@ import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; @@ -107,6 +108,9 @@ final class ExtractEdge extends Extract { "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file", "ExtractEdge_Module_Name=Microsoft Edge", "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history", + "Progress_Message_Edge_History=Microsoft Edge History", + "Progress_Message_Edge_Bookmarks=Microsoft Edge Bookmarks", + "Progress_Message_Edge_Cookies=Microsoft Edge Cookies", }) /** @@ -122,7 +126,7 @@ final class ExtractEdge extends Extract { } @Override - void process(Content dataSource, IngestJobContext context) { + void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; this.context = context; this.setFoundData(false); @@ -164,12 +168,13 @@ final class ExtractEdge extends Extract { } try { - this.processWebCacheDbFile(esedumper, 
webCacheFiles); + this.processWebCacheDbFile(esedumper, webCacheFiles, progressBar); } catch (IOException | TskCoreException ex) { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_webcacheFail()); LOG.log(Level.SEVERE, "Error returned from processWebCacheDbFile", ex); // NON-NLS } + progressBar.progress(Bundle.Progress_Message_Edge_Bookmarks()); try { this.processSpartanDbFile(esedumper, spartanFiles); } catch (IOException | TskCoreException ex) { @@ -187,7 +192,7 @@ final class ExtractEdge extends Extract { * @throws IOException * @throws TskCoreException */ - void processWebCacheDbFile(String eseDumperPath, List webCacheFiles) throws IOException, TskCoreException { + void processWebCacheDbFile(String eseDumperPath, List webCacheFiles, DataSourceIngestModuleProgress progressBar) throws IOException, TskCoreException { for (AbstractFile webCacheFile : webCacheFiles) { @@ -218,13 +223,17 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + + progressBar.progress(Bundle.Progress_Message_Edge_History()); + this.getHistory(webCacheFile, resultsDir); if (context.dataSourceIngestIsCancelled()) { return; } + progressBar.progress(Bundle.Progress_Message_Edge_Cookies()); + this.getCookies(webCacheFile, resultsDir); // if (context.dataSourceIngestIsCancelled()) { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 223ed69799..099579ecae 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -41,6 +41,7 @@ import java.util.Collection; import java.util.Scanner; import java.util.stream.Collectors; import org.openide.modules.InstalledFileLocator; +import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import 
org.sleuthkit.autopsy.datamodel.ContentUtils; @@ -53,6 +54,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.*; @@ -71,6 +73,16 @@ class ExtractIE extends Extract { private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); private Content dataSource; private IngestJobContext context; + + @Messages({ + "Progress_Message_IE_History=IE History", + "Progress_Message_IE_Bookmarks=IE Bookmarks", + "Progress_Message_IE_Cookies=IE Cookies", + "Progress_Message_IE_Downloads=IE Downloads", + "Progress_Message_IE_FormHistory=IE Form History", + "Progress_Message_IE_AutoFill=IE Auto Fill", + "Progress_Message_IE_Logins=IE Logins", + }) ExtractIE() throws NoCurrentCaseException { moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text"); @@ -79,12 +91,18 @@ class ExtractIE extends Extract { } @Override - public void process(Content dataSource, IngestJobContext context) { + public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; this.context = context; dataFound = false; + + progressBar.progress(Bundle.Progress_Message_IE_Bookmarks()); this.getBookmark(); + + progressBar.progress(Bundle.Progress_Message_IE_Cookies()); this.getCookie(); + + progressBar.progress(Bundle.Progress_Message_IE_History()); this.getHistory(); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java index 04976cb95b..9a838d2adb 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java 
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java @@ -28,6 +28,7 @@ import org.apache.commons.io.FilenameUtils; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -39,7 +40,8 @@ import org.sleuthkit.datamodel.TskCoreException; * Create OS INFO artifacts for the Operating Systems believed to be present on * the data source. */ -@Messages({"ExtractOs.parentModuleName=Recent Activity"}) +@Messages({"ExtractOs.parentModuleName=Recent Activity", + "ExtractOS_progressMessage=Checking for OS"}) class ExtractOs extends Extract { private static final Logger logger = Logger.getLogger(ExtractOs.class.getName()); @@ -64,9 +66,10 @@ class ExtractOs extends Extract { private Content dataSource; @Override - void process(Content dataSource, IngestJobContext context) { + void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; try { + progressBar.progress(Bundle.ExtractOS_progressMessage()); for (OS_TYPE value : OS_TYPE.values()) { checkForOSFiles(value); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 68885bcc52..77338655a5 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -52,6 +52,7 @@ import org.xml.sax.SAXException; import java.nio.file.Path; import static java.util.TimeZone.getTimeZone; import org.openide.util.Lookup; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import 
org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; @@ -66,7 +67,8 @@ import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamExce */ @NbBundle.Messages({ "RegRipperNotFound=Autopsy RegRipper executable not found.", - "RegRipperFullNotFound=Full version RegRipper executable not found." + "RegRipperFullNotFound=Full version RegRipper executable not found.", + "Progress_Message_Analyze_Registry=Analyzing Registry Files" }) class ExtractRegistry extends Extract { @@ -969,9 +971,11 @@ class ExtractRegistry extends Extract { } @Override - public void process(Content dataSource, IngestJobContext context) { + public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; this.context = context; + + progressBar.progress(Bundle.Progress_Message_Analyze_Registry()); analyzeRegistryFiles(); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index afb52654b2..3f98bcdf91 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -41,6 +41,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; @@ -89,6 +90,10 @@ final class ExtractSafari extends Extract { "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.", 
"ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files", "ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files", + "Progress_Message_Safari_History=Safari History", + "Progress_Message_Safari_Bookmarks=Safari Bookmarks", + "Progress_Message_Safari_Cookies=Safari Cookies", + "Progress_Message_Safari_Downloads=Safari Downloads", }) /** @@ -105,9 +110,10 @@ final class ExtractSafari extends Extract { } @Override - void process(Content dataSource, IngestJobContext context) { + void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { setFoundData(false); - + + progressBar.progress(Bundle.Progress_Message_Safari_Cookies()); try { processHistoryDB(dataSource, context); @@ -116,6 +122,7 @@ final class ExtractSafari extends Extract { LOG.log(Level.SEVERE, "Exception thrown while processing history file: {0}", ex); //NON-NLS } + progressBar.progress(Bundle.Progress_Message_Safari_Bookmarks()); try { processBookmarkPList(dataSource, context); } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | ParserConfigurationException ex) { @@ -123,13 +130,15 @@ final class ExtractSafari extends Extract { LOG.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file: {0}", ex); //NON-NLS } - try { + progressBar.progress(Bundle.Progress_Message_Safari_Downloads()); + try { processDownloadsPList(dataSource, context); } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | ParserConfigurationException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); LOG.log(Level.SEVERE, "Exception thrown while parsing Safari Download.plist file: {0}", ex); //NON-NLS } + progressBar.progress(Bundle.Progress_Message_Safari_Cookies()); try { processBinaryCookieFile(dataSource, context); } catch (IOException | TskCoreException ex) { diff --git 
a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 043e0bf0ff..129d86d10d 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -44,12 +44,14 @@ import java.util.Set; import java.util.logging.Level; import org.openide.util.NbBundle; +import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.datamodel.ContentUtils; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; @@ -63,6 +65,15 @@ import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException; import org.sleuthkit.datamodel.TskCoreException; +@Messages({ + "Progress_Message_Firefox_History=Firefox History", + "Progress_Message_Firefox_Bookmarks=Firefox Bookmarks", + "Progress_Message_Firefox_Cookies=Firefox Cookies", + "Progress_Message_Firefox_Downloads=Firefox Downloads", + "Progress_Message_Firefox_FormHistory=Firefox Form History", + "Progress_Message_Firefox_AutoFill=Firefox Auto Fill" +}) + /** * Firefox recent activity extraction */ @@ -95,15 +106,27 @@ class Firefox extends Extract { } @Override - public void process(Content dataSource, IngestJobContext context) { + public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; this.context = context; dataFound = false; + + 
progressBar.progress(Bundle.Progress_Message_Firefox_History()); this.getHistory(); + + progressBar.progress(Bundle.Progress_Message_Firefox_Bookmarks()); this.getBookmark(); + + progressBar.progress(Bundle.Progress_Message_Firefox_Downloads()); this.getDownload(); + + progressBar.progress(Bundle.Progress_Message_Firefox_Cookies()); this.getCookie(); + + progressBar.progress(Bundle.Progress_Message_Firefox_FormHistory()); this.getFormsHistory(); + + progressBar.progress(Bundle.Progress_Message_Firefox_AutoFill()); this.getAutofillProfiles(); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 434cf968de..18757cc997 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -119,7 +119,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule { progressBar.progress(extracter.getName(), i); try { - extracter.process(dataSource, context); + extracter.process(dataSource, context, progressBar); } catch (Exception ex) { logger.log(Level.SEVERE, "Exception occurred in " + extracter.getName(), ex); //NON-NLS subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModFailed", diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java index bb200551fb..160f88f9b7 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java @@ -29,6 +29,7 @@ import java.util.logging.Level; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.Collection; +import org.openide.util.NbBundle.Messages; import 
org.sleuthkit.autopsy.coreutils.JLNK; import org.sleuthkit.autopsy.coreutils.JLnkParser; import org.sleuthkit.autopsy.coreutils.JLnkParserException; @@ -53,6 +54,10 @@ class RecentDocumentsByLnk extends Extract { private IngestServices services = IngestServices.getInstance(); private Content dataSource; private IngestJobContext context; + + @Messages({ + "Progress_Message_Extract_Resent_Docs=Recent Documents", + }) /** * Find the documents that Windows stores about recent documents and make @@ -125,10 +130,12 @@ class RecentDocumentsByLnk extends Extract { } @Override - public void process(Content dataSource, IngestJobContext context) { + public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; this.context = context; dataFound = false; + + progressBar.progress(Bundle.Progress_Message_Extract_Resent_Docs()); this.getRecentDocuments(); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index 4c8999ec47..5aff1a4a0b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -33,6 +33,7 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.coreutils.XMLUtil; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -62,7 +63,8 @@ import org.xml.sax.SAXException; "cannotBuildXmlParser=Unable to build XML parser: ", "cannotLoadSEUQA=Unable to load Search Engine URL Query Analyzer settings file, 
SEUQAMappings.xml: ", "cannotParseXml=Unable to parse XML file: ", - "# {0} - file name", "SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}." + "# {0} - file name", "SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}.", + "Progress_Message_Find_Search_Query=Find Search Queries" }) class SearchEngineURLQueryAnalyzer extends Extract { @@ -396,9 +398,11 @@ class SearchEngineURLQueryAnalyzer extends Extract { } @Override - public void process(Content dataSource, IngestJobContext context) { + public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; this.context = context; + + progressBar.progress(Bundle.Progress_Message_Find_Search_Query()); this.findSearchQueries(); logger.log(Level.INFO, "Search Engine stats: \n{0}", getTotals()); //NON-NLS } From a82bcc603e8b052b972ac345edd67119a502f0b1 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Tue, 5 Mar 2019 10:33:44 -0500 Subject: [PATCH 75/80] Modified code based on review comments --- .../autopsy/recentactivity/ExtractEdge.java | 31 ++++++------------- 1 file changed, 9 insertions(+), 22 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 9ea55287ff..76adb9a11a 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -38,6 +38,7 @@ import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.ExecUtil; +import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.coreutils.PlatformUtil; @@ -181,8 +182,8 @@ final class 
ExtractEdge extends Extract { } /** - * Dump the tables from WebCacheV01.dat and look for the data contained with - * in those files including downloads, cookies and history. + * Process WebCacheV01.dat ese database file creating artifacts for cookies, + * and history contained within. * * @param eseDumperPath Path to ESEDatabaseView.exe * @param webCacheFiles List of case WebCacheV01.dat files @@ -231,21 +232,14 @@ final class ExtractEdge extends Extract { } finally { tempWebCacheFile.delete(); - - // Emppty the result dir - File[] resultFiles = resultsDir.listFiles(); - for (File file : resultFiles) { - file.delete(); - } - - resultsDir.delete(); + FileUtil.deleteFileDir(resultsDir); } } } /** - * Creates a temp version of the database and runs the ESEDatabaseView tool - * to dump each of the database tables into a temporary folder. + * Process spartan.edb ese database file creating artifacts for the bookmarks + * contained within. * * @param eseDumperPath Path to ESEDatabaseViewer * @param spartanFiles List of the case spartan.edb files @@ -286,14 +280,7 @@ final class ExtractEdge extends Extract { } finally { tempSpartanFile.delete(); - - // Empty the result dir - File[] resultFiles = resultsDir.listFiles(); - for (File file : resultFiles) { - file.delete(); - } - - resultsDir.delete(); + FileUtil.deleteFileDir(resultsDir); } } } @@ -536,7 +523,7 @@ final class ExtractEdge extends Extract { private List fetchWebCacheDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME, EDGE_WEBCACHE_FOLDER_NAME); //NON-NLS + return fileManager.findFiles(dataSource, EDGE_WEBCACHE_NAME, EDGE_WEBCACHE_FOLDER_NAME); } /** @@ -548,7 +535,7 @@ final class ExtractEdge extends Extract { private List fetchSpartanDBFiles() throws TskCoreException { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = 
currentCase.getServices().getFileManager(); - return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME, EDGE_SPARTAN_FOLDER_NAME); //NON-NLS + return fileManager.findFiles(dataSource, EDGE_SPARTAN_NAME, EDGE_SPARTAN_FOLDER_NAME); } /** From 449955bfb63804e3eaec91b60bf98dd30a2a1b69 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 5 Mar 2019 12:00:39 -0500 Subject: [PATCH 76/80] Update bundle MERGE files --- .../contentviewer/Bundle.properties-MERGED | 2 ++ .../contentviewers/Bundle.properties-MERGED | 5 +++-- .../autopsy/report/Bundle.properties-MERGED | 7 ++++++- .../autopsy/keywordsearch/Bundle.properties-MERGED | 2 +- .../recentactivity/Bundle.properties-MERGED | 14 ++++++++++++-- 5 files changed, 24 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED index 68904ecb93..3734fc5a3e 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED @@ -10,6 +10,7 @@ DataContentViewerOtherCases.correlatedArtifacts.failed=Failed to get frequency d DataContentViewerOtherCases.correlatedArtifacts.isEmpty=There are no files or artifacts to correlate. DataContentViewerOtherCases.correlatedArtifacts.title=Attribute Frequency DataContentViewerOtherCases.earliestCaseNotAvailable=\ Not Enabled. +DataContentViewerOtherCases.foundIn.text=Found %d instances in %d cases and %d data sources. DataContentViewerOtherCases.noOpenCase.errMsg=No open case available. 
DataContentViewerOtherCases.selectAllMenuItem.text=Select All DataContentViewerOtherCases.showCaseDetailsMenuItem.text=Show Case Details @@ -22,6 +23,7 @@ DataContentViewerOtherCases.showCommonalityMenuItem.text=Show Frequency DataContentViewerOtherCases.earliestCaseDate.text=Earliest Case Date DataContentViewerOtherCases.earliestCaseLabel.toolTipText= DataContentViewerOtherCases.earliestCaseLabel.text=Central Repository Starting Date: +DataContentViewerOtherCases.foundInLabel.text= DataContentViewerOtherCases.title=Other Occurrences DataContentViewerOtherCases.toolTip=Displays instances of the selected file/artifact from other occurrences. DataContentViewerOtherCasesTableModel.attribute=Matched Attribute diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED index 6c3d0b1d5c..5cd40a900d 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED @@ -32,6 +32,8 @@ GstVideoPanel.progress.buffering=Buffering... 
GstVideoPanel.progressLabel.bufferingErr=Error buffering file GstVideoPanel.progress.infoLabel.updateErr=Error updating video progress: {0} GstVideoPanel.ExtractMedia.progress.buffering=Buffering {0} +HtmlPanel_showImagesToggleButton_hide=Hide Images +HtmlPanel_showImagesToggleButton_show=Show Images MediaFileViewer.AccessibleContext.accessibleDescription= MediaFileViewer.title=Media MediaFileViewer.toolTip=Displays supported multimedia files (images, videos, audio) @@ -44,8 +46,6 @@ MediaViewVideoPanel.infoLabel.text=info MediaViewImagePanel.imgFileTooLarge.msg=Could not load image file (too large): {0} MessageContentViewer.AtrachmentsPanel.title=Attachments -MessageContentViewer.showImagesToggleButton.hide.text=Hide Images -MessageContentViewer.showImagesToggleButton.text=Show Images MessageContentViewer.title=Message MessageContentViewer.toolTip=Displays messages. Metadata.nodeText.none=None @@ -140,6 +140,7 @@ MediaViewImagePanel.zoomResetButton.text=Reset MediaViewImagePanel.zoomTextField.text= MediaViewImagePanel.rotationTextField.text= MediaViewImagePanel.rotateLeftButton.toolTipText= +HtmlPanel.showImagesToggleButton.text=Show Images # {0} - tableName SQLiteViewer.readTable.errorText=Error getting rows for table: {0} # {0} - tableName diff --git a/Core/src/org/sleuthkit/autopsy/report/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/report/Bundle.properties-MERGED index 6a7d5876d0..68a553dd27 100755 --- a/Core/src/org/sleuthkit/autopsy/report/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/report/Bundle.properties-MERGED @@ -1,5 +1,5 @@ # {0} - File name -CreatePortableCaseModule.addFilesToPortableCase.copyingFile=Copying file {0} +CreatePortableCaseModule.copyContentToPortableCase.copyingFile=Copying file {0} # {0} - case folder CreatePortableCaseModule.createCase.caseDirExists=Case folder {0} already exists CreatePortableCaseModule.createCase.errorCreatingCase=Error creating case @@ -7,11 +7,16 @@ 
CreatePortableCaseModule.createCase.errorCreatingCase=Error creating case CreatePortableCaseModule.createCase.errorCreatingFolder=Error creating folder {0} CreatePortableCaseModule.generateReport.caseClosed=Current case has been closed # {0} - tag name +CreatePortableCaseModule.generateReport.copyingArtifacts=Copying artifacts tagged as {0}... +# {0} - tag name CreatePortableCaseModule.generateReport.copyingFiles=Copying files tagged as {0}... CreatePortableCaseModule.generateReport.copyingTags=Copying tags... CreatePortableCaseModule.generateReport.creatingCase=Creating portable case database... +CreatePortableCaseModule.generateReport.errorCopyingArtifacts=Error copying tagged artifacts CreatePortableCaseModule.generateReport.errorCopyingFiles=Error copying tagged files CreatePortableCaseModule.generateReport.errorCopyingTags=Error copying tags +# {0} - attribute type name +CreatePortableCaseModule.generateReport.errorLookingUpAttrType=Error looking up attribute type {0} CreatePortableCaseModule.generateReport.noTagsSelected=No tags selected for export. # {0} - output folder CreatePortableCaseModule.generateReport.outputDirDoesNotExist=Output folder {0} does not exist diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index 61c0d2d2c7..f4febc1d7c 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -34,7 +34,7 @@ KeywordSearchIngestModule.startupMessage.failedToGetIndexSchema=Failed to get sc KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found. 
KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\n\The module indexes files found in the disk image at ingest time.\n\It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 1e5595024a..58ea431f13 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -7,6 +7,12 @@ DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0}) DataSourceUsageAnalyzer.parentModuleName=Recent Activity Extract.indexError.message=Failed to index artifact for keyword search. Extract.noOpenCase.errMsg=No open case available. 
+ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history +ExtractEdge_Module_Name=Microsoft Edge +ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file +ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file +ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer +ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file ExtractOs.androidOs.label=Android ExtractOs.androidVolume.label=OS Drive (Android) ExtractOs.debianLinuxOs.label=Linux (Debian) @@ -37,8 +43,12 @@ ExtractOs.unitedLinuxVolume.label=OS Drive (Linux United Linux) ExtractOs.windowsVolume.label=OS Drive (Windows) ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog) ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog) +ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files. +ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files +ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files +ExtractSafari_Module_Name=Safari OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\nThe module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. 
+OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n\The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\n\The module currently supports Windows only disk images.\n\The plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module Chrome.moduleName=Chrome @@ -136,7 +146,7 @@ SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}. SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE -SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\ncount: {2}\nSplit Tokens: \n{3} +SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\n\count: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity UsbDeviceIdMapper.parseAndLookup.text=Product: {0} From 6b6f38d5502d8932e0d17d7147c208b2bcdd2c2b Mon Sep 17 00:00:00 2001 From: rcordovano Date: Tue, 5 Mar 2019 14:51:02 -0500 Subject: [PATCH 77/80] Fix cause of compile warning for DataContentViewerOtherCases.java --- .../contentviewer/DataContentViewerOtherCases.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java index 4762da103a..308e14be63 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java +++ 
b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java @@ -380,7 +380,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi * a case to be unique. We should improve this in the future. */ Set cases = new HashSet<>(); - Map devices = new HashMap(); + Map devices = new HashMap<>(); for (int i=0; i < model.getRowCount(); i++) { String caseName = (String) model.getValueAt(i, caseColumnIndex); From 2b739fe3135cc64733cf842d36815894dde75d31 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 5 Mar 2019 15:14:26 -0500 Subject: [PATCH 78/80] Bundle.properties-MERGE file fixes --- .../sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED | 2 +- .../sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index f4febc1d7c..61c0d2d2c7 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -34,7 +34,7 @@ KeywordSearchIngestModule.startupMessage.failedToGetIndexSchema=Failed to get sc KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found. 
KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\n\The module indexes files found in the disk image at ingest time.\n\It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 58ea431f13..5b275de006 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -48,7 +48,7 @@ ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bo ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files ExtractSafari_Module_Name=Safari OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n\The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\n\The module currently supports Windows only disk images.\n\The plugin is also fully functional when deployed on Windows version of Autopsy. +OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\nThe module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. 
OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module Chrome.moduleName=Chrome @@ -146,7 +146,7 @@ SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}. SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE -SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\n\count: {2}\nSplit Tokens: \n{3} +SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\ncount: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity UsbDeviceIdMapper.parseAndLookup.text=Product: {0} From 706005772fe99075d1c32ab0e707798649d1bbea Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Tue, 5 Mar 2019 16:12:58 -0500 Subject: [PATCH 79/80] removed commented out line from ivy.xml --- .../sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED | 2 +- RecentActivity/ivy.xml | 3 --- .../sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED | 4 ++-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index 61c0d2d2c7..f4febc1d7c 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -34,7 +34,7 @@ KeywordSearchIngestModule.startupMessage.failedToGetIndexSchema=Failed to get sc KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found. 
KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\n\The module indexes files found in the disk image at ingest time.\n\It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search diff --git a/RecentActivity/ivy.xml b/RecentActivity/ivy.xml index 26621b0320..290c8371ea 100644 --- a/RecentActivity/ivy.xml +++ b/RecentActivity/ivy.xml @@ -6,7 +6,4 @@ - - - diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 5b275de006..58ea431f13 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -48,7 +48,7 @@ ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bo ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files ExtractSafari_Module_Name=Safari OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\nThe module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. 
+OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n\The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web acitivity (sites visited, stored cookies, bookmarked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\n\The module currently supports Windows only disk images.\n\The plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module Chrome.moduleName=Chrome @@ -146,7 +146,7 @@ SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}. SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE -SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\ncount: {2}\nSplit Tokens: \n{3} +SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\n\count: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity UsbDeviceIdMapper.parseAndLookup.text=Product: {0} From ac754393e23c7e49c7dd6a1e5c8c7e26d25f1724 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 5 Mar 2019 16:33:46 -0500 Subject: [PATCH 80/80] Fix MERGED bundle for KWS, update MERGED bundle for RA --- .../autopsy/keywordsearch/Bundle.properties | 2 +- .../keywordsearch/Bundle.properties-MERGED | 2 +- .../recentactivity/Bundle.properties-MERGED | 32 +++++++++++++++++++ 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties index 9d82d32924..67e6496584 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties +++ 
b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties @@ -1,5 +1,5 @@ OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\n\The module indexes files found in the disk image at ingest time.\n\It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index f4febc1d7c..034e4355d3 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -34,7 +34,7 @@ KeywordSearchIngestModule.startupMessage.failedToGetIndexSchema=Failed to get sc KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found. 
KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\n\The module indexes files found in the disk image at ingest time.\n\It then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword seach bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 58ea431f13..937b659039 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -43,6 +43,7 @@ ExtractOs.unitedLinuxVolume.label=OS Drive (Linux United Linux) ExtractOs.windowsVolume.label=OS Drive (Windows) ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog) ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog) +ExtractOS_progressMessage=Checking for OS ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files. ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files @@ -119,6 +120,37 @@ Firefox.getDlPre24.errMsg.errParsingArtifacts={0}: Error parsing {1} Firefox web Firefox.getDlV24.errMsg.errFetchFiles=Error fetching 'downloads' files for Firefox. Firefox.getDlV24.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1} Firefox.getDlV24.errMsg.errParsingArtifacts={0}: Error parsing {1} Firefox web download artifacts. 
+Progress_Message_Analyze_Registry=Analyzing Registry Files +Progress_Message_Analyze_Usage=Data Sources Usage Analysis +Progress_Message_Chrome_AutoFill=Chrome Auto Fill +Progress_Message_Chrome_Bookmarks=Chrome Bookmarks +Progress_Message_Chrome_Cookies=Chrome Cookies +Progress_Message_Chrome_Downloads=Chrome Downloads +Progress_Message_Chrome_FormHistory=Chrome Form History +Progress_Message_Chrome_History=Chrome History +Progress_Message_Chrome_Logins=Chrome Logins +Progress_Message_Edge_Bookmarks=Microsoft Edge Bookmarks +Progress_Message_Edge_Cookies=Microsoft Edge Cookies +Progress_Message_Edge_History=Microsoft Edge History +Progress_Message_Extract_Resent_Docs=Recent Documents +Progress_Message_Find_Search_Query=Find Search Queries +Progress_Message_Firefox_AutoFill=Firefox Auto Fill +Progress_Message_Firefox_Bookmarks=Firefox Bookmarks +Progress_Message_Firefox_Cookies=Firefox Cookies +Progress_Message_Firefox_Downloads=Firefox Downloads +Progress_Message_Firefox_FormHistory=Firefox Form History +Progress_Message_Firefox_History=Firefox History +Progress_Message_IE_AutoFill=IE Auto Fill +Progress_Message_IE_Bookmarks=IE Bookmarks +Progress_Message_IE_Cookies=IE Cookies +Progress_Message_IE_Downloads=IE Downloads +Progress_Message_IE_FormHistory=IE Form History +Progress_Message_IE_History=IE History +Progress_Message_IE_Logins=IE Logins +Progress_Message_Safari_Bookmarks=Safari Bookmarks +Progress_Message_Safari_Cookies=Safari Cookies +Progress_Message_Safari_Downloads=Safari Downloads +Progress_Message_Safari_History=Safari History RAImageIngestModule.process.started=Started {0} RAImageIngestModule.process.errModFailed={0} failed - see log for details
RAImageIngestModule.process.errModErrs={0} had errors -- see log