Merge branch 'develop' of https://github.com/sleuthkit/autopsy into develop

raman-bt 2014-02-10 14:32:19 -05:00
commit e747898d69
7 changed files with 168 additions and 79 deletions

View File

@@ -23,6 +23,8 @@ import java.awt.Component;
import org.openide.nodes.Node;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
@@ -136,11 +138,10 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer
addRow(sb, "Size", new Long(file.getSize()).toString() );
addRow(sb, "File Name Allocation", file.getDirFlagAsString());
addRow(sb, "Metadata Allocation", file.getMetaFlagsAsString());
addRow(sb, "Modified", file.getMtimeAsDate());
addRow(sb, "Accessed", file.getAtimeAsDate());
addRow(sb, "Created", file.getCrtimeAsDate());
addRow(sb, "Changed", file.getCtimeAsDate());
addRow(sb, "Modified", ContentUtils.getStringTime(file.getMtime(), file));
addRow(sb, "Accessed", ContentUtils.getStringTime(file.getAtime(), file));
addRow(sb, "Created", ContentUtils.getStringTime(file.getCrtime(), file));
addRow(sb, "Changed", ContentUtils.getStringTime(file.getCtime(), file));
String md5 = file.getMd5Hash();
if (md5 == null) {
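
Note: the hunk above replaces the AbstractFile convenience formatters (getMtimeAsDate() and friends) with ContentUtils.getStringTime(long, Content), presumably so the Metadata viewer's timestamps go through the common ContentUtils time-zone handling rather than the per-file formatters. A minimal sketch of the new pattern, using only the calls visible in the diff (the wrapper class and method names are illustrative, not part of the commit):

    import org.sleuthkit.autopsy.datamodel.ContentUtils;
    import org.sleuthkit.datamodel.AbstractFile;

    // Illustrative sketch: format the four file times the way the updated viewer does.
    class FileTimesSketch {
        static String describeTimes(AbstractFile file) {
            StringBuilder sb = new StringBuilder();
            sb.append("Modified: ").append(ContentUtils.getStringTime(file.getMtime(), file)).append('\n');
            sb.append("Accessed: ").append(ContentUtils.getStringTime(file.getAtime(), file)).append('\n');
            sb.append("Created:  ").append(ContentUtils.getStringTime(file.getCrtime(), file)).append('\n');
            sb.append("Changed:  ").append(ContentUtils.getStringTime(file.getCtime(), file)).append('\n');
            return sb.toString();
        }
    }
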

View File

@@ -256,7 +256,7 @@ public class ExplorerNodeActionVisitor extends ContentVisitor.Default<List<? ext
imgDetailPanel.setImgTypeValue(img.getType().getName());
imgDetailPanel.setImgSectorSizeValue(Long.toString(img.getSsize()));
imgDetailPanel.setImgTotalSizeValue(Long.toString(img.getSize()));
String hash = img.getHash();
String hash = img.getMd5();
// don't show the hash if there isn't one
imgDetailPanel.setVisibleHashInfo(hash != null);
imgDetailPanel.setImgHashValue(hash);
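
Note: the one-line change above tracks a Sleuth Kit datamodel rename: the image's stored acquisition hash is now read with Image.getMd5() instead of getHash(), and the detail panel hides the row when no hash is present. A tiny illustrative guard built on that accessor (the wrapper class and method are hypothetical):

    import org.sleuthkit.datamodel.Image;

    // Illustrative sketch: return a lower-cased MD5 suitable for display, or null if none was stored.
    class ImageHashSketch {
        static String displayableMd5(Image img) {
            String hash = img.getMd5();   // may be null when the image carries no recorded hash
            return (hash == null || hash.isEmpty()) ? null : hash.toLowerCase();
        }
    }
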

View File

@@ -100,7 +100,6 @@ class ExtractIE extends Extract {
dataFound = false;
this.getBookmark(dataSource, controller);
this.getCookie(dataSource, controller);
this.getRecentDocuments(dataSource, controller);
this.getHistory(dataSource, controller);
}
@@ -243,62 +242,6 @@ class ExtractIE extends Extract {
services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE));
}
/**
* Find the documents that Windows stores about recent documents and make artifacts.
* @param dataSource
* @param controller
*/
private void getRecentDocuments(Content dataSource, IngestDataSourceWorkerController controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> recentFiles = null;
try {
recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error searching for .lnk files.");
this.addErrorMessage(this.getName() + ": Error getting lnk Files.");
return;
}
if (recentFiles.isEmpty()) {
logger.log(Level.INFO, "Didn't find any IE recent files.");
return;
}
dataFound = true;
for (AbstractFile recentFile : recentFiles) {
if (controller.isCancelled()) {
break;
}
if (recentFile.getSize() == 0) {
continue;
}
JLNK lnk = null;
JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize());
try {
lnk = lnkParser.parse();
} catch (JLnkParserException e) {
//TODO should throw a specific checked exception
boolean unalloc = recentFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)
|| recentFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC);
if (unalloc == false) {
logger.log(Level.SEVERE, "Error lnk parsing the file to get recent files" + recentFile, e);
this.addErrorMessage(this.getName() + ": Error parsing Recent File " + recentFile.getName());
}
continue;
}
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
String path = lnk.getBestPath();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", path));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(dataSource, path)));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", recentFile.getCrtime()));
this.addArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes);
}
services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT));
}
/**
* Locates index.dat files, runs Pasco on them, and creates artifacts.
* @param dataSource

View File

@@ -158,6 +158,7 @@ public final class RAImageIngestModule extends IngestModuleDataSource {
final Extract registry = new ExtractRegistry();
final Extract iexplore = new ExtractIE();
final Extract recentDocuments = new RecentDocumentsByLnk();
final Extract chrome = new Chrome();
final Extract firefox = new Firefox();
final Extract SEUQA = new SearchEngineURLQueryAnalyzer();
@@ -165,6 +166,7 @@ public final class RAImageIngestModule extends IngestModuleDataSource {
modules.add(chrome);
modules.add(firefox);
modules.add(iexplore);
modules.add(recentDocuments);
// this needs to run after the web browser modules
modules.add(SEUQA);

View File

@@ -0,0 +1,147 @@
/*
*
* Autopsy Forensic Browser
*
* Copyright 2012-2013 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
* Project Contact/Architect: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.recentactivity;
// imports
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import java.util.Collection;
import org.sleuthkit.autopsy.coreutils.JLNK;
import org.sleuthkit.autopsy.coreutils.JLnkParser;
import org.sleuthkit.autopsy.coreutils.JLnkParserException;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.datamodel.*;
/**
* Recent documents extractor: finds the Windows shortcut (.lnk) files kept for
* recently opened documents and turns them into artifacts.
*/
class RecentDocumentsByLnk extends Extract {
private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName());
private IngestServices services;
final private static String MODULE_VERSION = "1.0";
/**
* Find the shortcut (.lnk) files that Windows keeps for recently opened documents
* and post them as recent-object artifacts.
* @param dataSource the data source to search for a Recent folder and its .lnk files
* @param controller worker controller used to check whether ingest was cancelled
*/
private void getRecentDocuments(Content dataSource, IngestDataSourceWorkerController controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> recentFiles = null;
try {
recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error searching for .lnk files.", ex);
this.addErrorMessage(this.getName() + ": Error getting .lnk files.");
return;
}
if (recentFiles.isEmpty()) {
logger.log(Level.INFO, "Didn't find any recent files.");
return;
}
dataFound = true;
for (AbstractFile recentFile : recentFiles) {
if (controller.isCancelled()) {
break;
}
if (recentFile.getSize() == 0) {
continue;
}
JLNK lnk = null;
JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize());
try {
lnk = lnkParser.parse();
} catch (JLnkParserException e) {
//TODO should throw a specific checked exception
boolean unalloc = recentFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)
|| recentFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC);
if (unalloc == false) {
logger.log(Level.SEVERE, "Error parsing .lnk file to get recent documents: " + recentFile, e);
this.addErrorMessage(this.getName() + ": Error parsing Recent File " + recentFile.getName());
}
continue;
}
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
String path = lnk.getBestPath();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", path));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(dataSource, path)));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", recentFile.getCrtime()));
this.addArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes);
}
services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT));
}
@Override
public String getVersion() {
return MODULE_VERSION;
}
@Override
public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
dataFound = false;
this.getRecentDocuments(dataSource, controller);
}
@Override
public void init(IngestModuleInit initContext) {
services = IngestServices.getDefault();
}
@Override
public void complete() {
}
@Override
public void stop() {
//call regular cleanup from complete() method
complete();
}
@Override
public String getDescription() {
return "Extracts recent documents in windows.";
}
@Override
public boolean hasBackgroundJobsRunning() {
return false;
}
}
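
Note: the new RecentDocumentsByLnk module carries over the logic removed from ExtractIE: locate %.lnk files under a Recent folder, parse each shortcut with JLnkParser, and post TSK_RECENT_OBJECT artifacts. A condensed sketch of the parse-and-attribute step, restricted to classes already imported above (the wrapper class and method are illustrative, and error handling plus the TSK_PATH_ID lookup are omitted):

    import java.util.ArrayList;
    import java.util.Collection;
    import org.sleuthkit.autopsy.coreutils.JLNK;
    import org.sleuthkit.autopsy.coreutils.JLnkParser;
    import org.sleuthkit.autopsy.coreutils.JLnkParserException;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
    import org.sleuthkit.datamodel.ReadContentInputStream;

    // Illustrative sketch: parse one .lnk file and build the core attributes the module
    // attaches to its TSK_RECENT_OBJECT artifact.
    class LnkArtifactSketch {
        static Collection<BlackboardAttribute> lnkAttributes(AbstractFile recentFile) throws JLnkParserException {
            JLnkParser parser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize());
            JLNK lnk = parser.parse();   // throws JLnkParserException on malformed shortcuts
            Collection<BlackboardAttribute> attrs = new ArrayList<BlackboardAttribute>();
            attrs.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", lnk.getBestPath()));
            attrs.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", recentFile.getCrtime()));
            return attrs;
        }
    }
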

View File

@@ -88,19 +88,15 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource {
skipped = true;
return;
}
// Get the hash stored in the E01 file from the database
if (skCase.imageHasHash(img)) {
try {
storedHash = skCase.getImageHash(img).toLowerCase();
if ((img.getMd5() != null) && !img.getMd5().isEmpty())
{
storedHash = img.getMd5().toLowerCase();
logger.info("Hash value stored in " + imgName + ": " + storedHash);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to get stored hash from image " + imgName, ex);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Error retrieving stored hash value from " + imgName));
return;
}
} else {
}
else {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Image " + imgName + " does not have stored hash."));
return;
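
Note: the rewritten block reads the stored acquisition hash straight from Image.getMd5() (with a null/empty check) instead of routing through SleuthkitCase.imageHasHash()/getImageHash(), which is why the TskCoreException handler disappears. For context, a minimal sketch of the comparison this module builds toward, recomputing an MD5 over a stream with plain JDK classes (illustrative only; this is not the module's actual read loop):

    import java.io.IOException;
    import java.io.InputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Illustrative sketch: recompute an MD5 over a stream and compare it with the stored value.
    class Md5CheckSketch {
        static boolean md5Matches(InputStream in, String storedHash) throws IOException, NoSuchAlgorithmException {
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) != -1) {
                md.update(buf, 0, n);
            }
            StringBuilder sb = new StringBuilder();
            for (byte b : md.digest()) {
                sb.append(String.format("%02x", b));   // two's-complement byte to lower-case hex
            }
            return sb.toString().equalsIgnoreCase(storedHash);
        }
    }
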

View File

@@ -47,7 +47,7 @@ def compile(errore, attachli, parsedin, branch):
antBuild("datamodel", False)
print("DataModel")
if(passed):
antBuild("autopsy", True)
antBuild("autopsy", True, branch)
print("Aut")
if(passed):
redo = False
@@ -131,7 +131,7 @@ def vsBuild():
redo = True
#Builds Autopsy or the Datamodel
def antBuild(which, Build):
def antBuild(which, Build, branch):
print("building: ", which)
global redo
global passed
@@ -165,14 +165,14 @@ def antBuild(which, Build):
open(chk)
except IOError as e:
if(not tryredo):
errorem += "DataModel Java build failed.\n"
errorem += "DataModel Java build failed. on branch " + branch + "\n"
attachl.append(antpth)
if email_enabled:
Emailer.send_email(to, server, subj, errorem, attachl)
passed = False
tryredo = True
elif (succd != 0 and (not tryredo)):
errorem += "Autopsy build failed.\n"
errorem += "Autopsy build failed on branch " + branch + ".\n"
attachl.append(antpth)
Emailer.send_email(to, server, subj, errorem, attachl)
tryredo = True