Merge branch 'develop' of https://github.com/sleuthkit/autopsy into timeline2

U-BASIS\tshahi 2014-02-05 09:54:57 -05:00
commit 30cc43bcc4
8 changed files with 45 additions and 96 deletions

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2013 Basis Technology Corp.
+ * Copyright 2013-2014 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,12 +20,12 @@
 package org.sleuthkit.autopsy.contentviewers;
 import java.awt.Component;
-import javax.swing.JTextPane;
 import org.openide.nodes.Node;
 import org.openide.util.lookup.ServiceProvider;
 import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
 /**
  * Shows file metadata as a list to make it easy to copy and paste.
@@ -133,6 +133,10 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer
 addRow(sb, "Name", file.getParentPath() + "/" + file.getName());
 }
+addRow(sb, "Size", new Long(file.getSize()).toString() );
+addRow(sb, "File Name Allocation", file.getDirFlagAsString());
+addRow(sb, "Metadata Allocation", file.getMetaFlagsAsString());
 addRow(sb, "Modified", file.getMtimeAsDate());
 addRow(sb, "Accessed", file.getAtimeAsDate());
 addRow(sb, "Created", file.getCrtimeAsDate());
@@ -143,8 +147,12 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer
 md5 = "Not calculated";
 }
 addRow(sb, "MD5", md5);
+addRow(sb, "Hash Lookup Results", file.getKnown().toString());
 addRow(sb, "Internal ID", new Long(file.getId()).toString());
+if (file.getType().compareTo(TSK_DB_FILES_TYPE_ENUM.LOCAL) == 0) {
+addRow(sb, "Local Path", file.getLocalAbsPath());
+}
 endTable(sb);
 setText(sb.toString());
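
A note on the pattern above: startTable/addRow/endTable build up an HTML table in a StringBuilder that is then handed to setText. Those helpers are not part of this hunk; the sketch below is only a guess at their shape, assuming a plain two-column table with no value escaping (the real viewer may differ).

    // Hypothetical sketch of the startTable/addRow/endTable helpers referenced above.
    class MetadataTableSketch {
        static void startTable(StringBuilder sb) {
            sb.append("<table>");
        }

        // One "label: value" row; assumes the value is already display-safe.
        static void addRow(StringBuilder sb, String key, String value) {
            sb.append("<tr><td>").append(key).append("</td><td>").append(value).append("</td></tr>");
        }

        static void endTable(StringBuilder sb) {
            sb.append("</table>");
        }

        public static void main(String[] args) {
            StringBuilder sb = new StringBuilder();
            startTable(sb);
            addRow(sb, "Size", Long.toString(12345L));
            addRow(sb, "MD5", "Not calculated");
            endTable(sb);
            System.out.println(sb);
        }
    }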

View File

@@ -101,7 +101,12 @@ public class FileExtMismatchIngestModule extends org.sleuthkit.autopsy.ingest.In
 // skip non-files
 if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) ||
 (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
+return ProcessResult.OK;
+}
+// deleted files often have content that was not theirs and therefore causes a mismatch
+if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) ||
+(abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
 return ProcessResult.OK;
 }
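
The lines added above widen the skip logic: besides unallocated and unused block pseudo-files, files whose name or metadata structure is unallocated (typically deleted files) are skipped, since their clusters may now hold unrelated content that would look like an extension mismatch. Restated as a standalone predicate (the class and method names here are mine, not the module's):

    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.TskData;

    class ExtMismatchSkipSketch {
        // True when the file should not be checked for an extension mismatch:
        // block-type pseudo files, or deleted files whose content may not be theirs.
        static boolean shouldSkip(AbstractFile f) {
            if (f.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS
                    || f.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) {
                return true;
            }
            return f.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)
                    || f.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC);
        }
    }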

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2012 Basis Technology Corp.
+ * Copyright 2012-2013 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -59,10 +59,8 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
 "text/javascript" //"application/xml",
 //"application/xml-dtd",
 );
-private final TikaLanguageIdentifier tikaLanguageIdentifier;
 AbstractFileHtmlExtract() {
-tikaLanguageIdentifier = new TikaLanguageIdentifier();
 this.module = KeywordSearchIngestModule.getDefault();
 ingester = Server.getIngester();
 }
@@ -166,11 +164,6 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
 totalRead = 0;
 extracted = sb.toString();
-//attempt to identify language of extracted text and post it to the blackboard
-tikaLanguageIdentifier.addLanguageToBlackBoard(extracted, sourceFile);
 //converts BOM automatically to charSet encoding
 byte[] encodedBytes = extracted.getBytes(outCharset);
 AbstractFileChunk chunk = new AbstractFileChunk(this, this.numChunks + 1);

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2012 Basis Technology Corp.
+ * Copyright 2012-2013 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -39,16 +39,11 @@ import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.ReadContentInputStream;
 import org.apache.tika.Tika;
-import org.apache.tika.language.LanguageIdentifier;
 import org.apache.tika.metadata.Metadata;
 import org.apache.tika.mime.MediaType;
 import org.apache.tika.parser.ParseContext;
-import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.coreutils.StringExtract;
 import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.TskCoreException;
 /**
  * Extractor of text from TIKA supported AbstractFile content. Extracted text is
@@ -75,11 +70,9 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
 private int numChunks = 0;
 //private static final String UTF16BOM = "\uFEFF"; disabled prepending of BOM
 private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor();
-private final List<String> TIKA_SUPPORTED_TYPES = new ArrayList<String>();
-private final TikaLanguageIdentifier tikaLanguageIdentifier;
+private final List<String> TIKA_SUPPORTED_TYPES = new ArrayList<>();
 AbstractFileTikaTextExtract() {
-tikaLanguageIdentifier = new TikaLanguageIdentifier();
 this.module = KeywordSearchIngestModule.getDefault();
 ingester = Server.getIngester();
@@ -87,7 +80,7 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
 for (MediaType mt : mediaTypes) {
 TIKA_SUPPORTED_TYPES.add(mt.getType() + "/" + mt.getSubtype());
 }
-logger.log(Level.INFO, "Tika supported media types: " + TIKA_SUPPORTED_TYPES);
+logger.log(Level.INFO, "Tika supported media types: {0}", TIKA_SUPPORTED_TYPES);
 }
@@ -138,13 +131,11 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
 try {
 future.get(Ingester.getTimeout(sourceFile.getSize()), TimeUnit.SECONDS);
 } catch (TimeoutException te) {
-tika = null;
 final String msg = "Exception: Tika parse timeout for content: " + sourceFile.getId() + ", " + sourceFile.getName();
 KeywordSearch.getTikaLogger().log(Level.WARNING, msg, te);
 logger.log(Level.WARNING, msg);
 throw new IngesterException(msg);
 } catch (Exception ex) {
-tika = null;
 final String msg = "Exception: Unexpected exception from Tika parse task execution for file: " + sourceFile.getId() + ", " + sourceFile.getName();
 KeywordSearch.getTikaLogger().log(Level.WARNING, msg, ex);
 logger.log(Level.WARNING, msg);
@@ -221,9 +212,6 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
 extracted = sb.toString();
-//attempt to identify language of extracted text and post it to the blackboard
-tikaLanguageIdentifier.addLanguageToBlackBoard(extracted, sourceFile);
 //converts BOM automatically to charSet encoding
 byte[] encodedBytes = extracted.getBytes(OUTPUT_CHARSET);
 AbstractFileChunk chunk = new AbstractFileChunk(this, this.numChunks + 1);
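
For context on the timeout block above: the parse runs on tikaParseExecutor and future.get(...) bounds it so a hung parser cannot stall ingest. Below is a minimal standalone sketch of the same parse-with-timeout idea; it uses Tika's simple parseToString facade instead of the module's streaming parser, and the file path and 60-second timeout are placeholders.

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;
    import org.apache.tika.Tika;

    class TikaTimeoutSketch {
        public static void main(String[] args) throws Exception {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            final Tika tika = new Tika();
            try (InputStream in = new FileInputStream(args[0])) {      // path supplied by the caller
                Future<String> parseTask = executor.submit(new Callable<String>() {
                    @Override
                    public String call() throws Exception {
                        // Simple facade; the ingest module drives a streaming parser instead.
                        return tika.parseToString(in);
                    }
                });
                try {
                    // Bound the parse so a pathological file cannot hang the pipeline.
                    String text = parseTask.get(60, TimeUnit.SECONDS); // placeholder timeout
                    System.out.println(text.length() + " characters extracted");
                } catch (TimeoutException te) {
                    parseTask.cancel(true);
                    System.err.println("Tika parse timed out: " + te);
                }
            } finally {
                executor.shutdownNow();
            }
        }
    }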

View File

@@ -91,7 +91,8 @@ public abstract class KeywordSearchListsAbstract {
 ips.add(new Keyword("(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IP_ADDRESS));
 //email
 List<Keyword> emails = new ArrayList<Keyword>();
-emails.add(new Keyword("[A-Z0-9._%-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL));
+emails.add(new Keyword("(?=.{8})[a-z0-9%+_-]+(?:\\.[a-z0-9%+_-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z]{2,4}(?<!\\.txt|\\.exe|\\.dll|\\.jpg|\\.xml)", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL));
+//emails.add(new Keyword("[A-Z0-9._%-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL));
 //URL
 List<Keyword> urls = new ArrayList<Keyword>();
 //urls.add(new Keyword("http://|https://|^www\\.", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL));
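
The replacement email regex above is stricter than the commented-out original: the (?=.{8}) lookahead requires at least eight characters from the start of the match, domain labels must begin and end with an alphanumeric, and the trailing lookbehind tries to reject hits whose last four characters look like a file extension (.txt, .exe, .dll, .jpg, .xml). A quick way to poke at it outside the keyword-search machinery, assuming the case-insensitive matching the search backend applies to list regexes:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    class EmailRegexSketch {
        public static void main(String[] args) {
            String regex = "(?=.{8})[a-z0-9%+_-]+(?:\\.[a-z0-9%+_-]+)*"
                    + "@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z]{2,4}"
                    + "(?<!\\.txt|\\.exe|\\.dll|\\.jpg|\\.xml)";
            // CASE_INSENSITIVE is an assumption about how the keyword search applies list regexes.
            Pattern p = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
            String sample = "Contact John.Smith@example.com or see readme.txt for details.";
            Matcher m = p.matcher(sample);
            while (m.find()) {
                System.out.println("hit: " + m.group());   // prints: hit: John.Smith@example.com
            }
        }
    }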

View File

@@ -1,61 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2013 Basis Technology Corp.
- * Contact: carrier <at> sleuthkit <dot> org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.keywordsearch;
-import java.util.logging.Level;
-import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.TskCoreException;
-/**
- * TextLanguageIdentifier implementation based on a wrapped Tike
- * LanguageIdentifier
- */
-class TikaLanguageIdentifier implements TextLanguageIdentifier {
-private static final Logger logger = Logger.getLogger(TikaLanguageIdentifier.class.getName());
-private static final int MIN_STRING_LENGTH = 1000;
-@Override
-public void addLanguageToBlackBoard(String extracted, AbstractFile sourceFile) {
-if (extracted.length() > MIN_STRING_LENGTH) {
-org.apache.tika.language.LanguageIdentifier li = new org.apache.tika.language.LanguageIdentifier(extracted);
-//logger.log(Level.INFO, sourceFile.getName() + " detected language: " + li.getLanguage()
-// + " with " + ((li.isReasonablyCertain()) ? "HIGH" : "LOW") + " confidence");
-BlackboardArtifact genInfo;
-try {
-genInfo = sourceFile.getGenInfoArtifact();
-BlackboardAttribute textLang = new BlackboardAttribute(
-BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEXT_LANGUAGE.getTypeID(),
-KeywordSearchIngestModule.MODULE_NAME, li.getLanguage());
-genInfo.addAttribute(textLang);
-} catch (TskCoreException ex) {
-logger.log(Level.WARNING, "failed to add TSK_TEXT_LANGUAGE attribute to TSK_GEN_INFO artifact for file: " + sourceFile.getName(), ex);
-}
-}
-}
-}
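
The wrapper deleted above was what posted Tika's language guess to the file's TSK_GEN_INFO artifact; with it gone, the extractors above drop their TikaLanguageIdentifier fields and the org.apache.tika.language.LanguageIdentifier import. For reference, the Tika 1.x API it wrapped can be exercised on its own roughly like this (the sample text is arbitrary):

    import org.apache.tika.language.LanguageIdentifier;

    class LanguageIdSketch {
        public static void main(String[] args) {
            String text = "Ceci est un petit exemple de texte en français pour le détecteur de langue.";
            LanguageIdentifier li = new LanguageIdentifier(text);
            // getLanguage() returns an ISO 639-1 code; short inputs give low-confidence guesses,
            // which is why the deleted class required at least MIN_STRING_LENGTH characters.
            System.out.println(li.getLanguage() + " (reasonably certain: " + li.isReasonablyCertain() + ")");
        }
    }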

View File

@@ -182,7 +182,7 @@ public class RegressionTest extends TestCase {
 public void testConfigureHash() {
 logger.info("Hash Configure");
-JDialog hashMainDialog = JDialogOperator.waitJDialog("Hash Database Configuration", false, false);
+JDialog hashMainDialog = JDialogOperator.waitJDialog("Hash Set Configuration", false, false);
 JDialogOperator hashMainDialogOperator = new JDialogOperator(hashMainDialog);
 List<String> databases = new ArrayList<String>();
 databases.add(System.getProperty("nsrl_path"));
@@ -190,9 +190,9 @@ public class RegressionTest extends TestCase {
 for (String database : databases) {
 JButtonOperator importButtonOperator = new JButtonOperator(hashMainDialogOperator, "Import");
 importButtonOperator.pushNoBlock();
-JDialog addDatabaseDialog = JDialogOperator.waitJDialog("Add Hash Database", false, false);
+JDialog addDatabaseDialog = JDialogOperator.waitJDialog("Import Hash Database", false, false);
 JDialogOperator addDatabaseDialogOperator = new JDialogOperator(addDatabaseDialog);
-JButtonOperator browseButtonOperator = new JButtonOperator(addDatabaseDialogOperator, "Browse", 0);
+JButtonOperator browseButtonOperator = new JButtonOperator(addDatabaseDialogOperator, "Open...", 0);
 browseButtonOperator.pushNoBlock();
 JFileChooserOperator fileChooserOperator = new JFileChooserOperator();
 fileChooserOperator.chooseFile(database);
@@ -232,8 +232,8 @@ public class RegressionTest extends TestCase {
 jfco0.chooseFile(words);
 JTableOperator jto = new JTableOperator(jdo, 0);
 jto.clickOnCell(0, 0);
-JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Enable for ingest", 0);
-if (!jcbo.isSelected()) {
+JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Use during ingest", 0);
+if (!(jcbo.isSelected())) {
 jcbo.doClick();
 }
 new Timeout("pausing", 1000).sleep(); // give it a second to process

View File

@@ -895,6 +895,11 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
 }
 }
+/**
+ * Parse the output of mactime to break the results into day-sized chunks (in GMT).
+ * @param f handle to the mactime CSV output
+ * @return
+ */
 private List<YearEpoch> parseMacTime(java.io.File f) {
 List<YearEpoch> years = new ArrayList<>();
 Scanner scan;
@@ -911,11 +916,15 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
 YearEpoch ye = null;
 while (scan.hasNextLine()) {
 String[] s = scan.nextLine().split(","); //1999-02-08T11:08:08Z, 78706, m..b, rrwxrwxrwx, 0, 0, 8355, /img...
+// break the date into month, day, and year; note that the ISO times are in GMT
 String[] datetime = s[0].split("T"); //{1999-02-08, 11:08:08Z}
 String[] date = datetime[0].split("-"); // {1999, 02, 08}
 int year = Integer.valueOf(date[0]);
 int month = Integer.valueOf(date[1]) - 1; //Months are zero indexed: 1 = February, 6 = July, 11 = December
 int day = Integer.valueOf(date[2]); //Days are 1 indexed
+// get the object id out of the modified output
 long ObjId = Long.valueOf(s[4]);
 // when the year changes, create and add a new YearEpoch object to the list
@@ -925,6 +934,7 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
 prevYear = year;
 }
+// save the object id along with the day
 if (ye != null) {
 ye.add(ObjId, month, day);
 }
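
Each mactime CSV row is split on commas; only the timestamp (column 0) and the object id (column 4) are used, and everything is treated as GMT, per the comments added above. A stripped-down sketch of that per-line parse, reusing the sample row from the code comment (in the stock mactime layout column 4 is the UID field, which the modified body file appears to repurpose for the object id):

    class MacTimeLineSketch {
        public static void main(String[] args) {
            // Sample row shape taken from the comment in parseMacTime(); values are illustrative.
            String line = "1999-02-08T11:08:08Z,78706,m..b,rrwxrwxrwx,0,0,8355,/img...";
            String[] s = line.split(",");
            String[] datetime = s[0].split("T");    // {"1999-02-08", "11:08:08Z"} -- ISO time, GMT
            String[] date = datetime[0].split("-"); // {"1999", "02", "08"}
            int year = Integer.valueOf(date[0]);
            int month = Integer.valueOf(date[1]) - 1; // zero-indexed to line up with java.util.Calendar
            int day = Integer.valueOf(date[2]);
            long objId = Long.valueOf(s[4]);          // in the modified output this column carries the object id
            System.out.println(year + "/" + (month + 1) + "/" + day + " -> objId " + objId);
        }
    }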
@@ -1038,6 +1048,11 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
 return bodyFilePath;
 }
+/**
+ * Run mactime on the given body file. Generates a CSV file with ISO dates (in GMT).
+ * @param pathToBodyFile
+ * @return Path to the output file.
+ */
 private String makeMacTime(String pathToBodyFile) {
 String cmdpath = "";
 String macpath = "";
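
makeMacTime shells out to The Sleuth Kit's mactime tool to turn the body file into that CSV. The actual command line is outside this hunk; the sketch below is one plausible invocation via ProcessBuilder, with the flags (-b body file, -d comma-delimited, -y ISO 8601 dates, -z time zone) taken from the mactime man page rather than from this module, and with placeholder paths.

    import java.io.File;
    import java.io.IOException;

    class MacTimeRunSketch {
        public static void main(String[] args) throws IOException, InterruptedException {
            String macpath = "mactime";                // assumed to be on PATH; Autopsy resolves its own copy
            String bodyFile = "timeline-body.txt";     // placeholder paths
            File csvOut = new File("timeline-mactime.csv");

            // Flags per the mactime man page: -b body file, -d comma-delimited output,
            // -y ISO 8601 dates, -z output time zone (GMT, matching the parser above).
            ProcessBuilder pb = new ProcessBuilder(macpath, "-b", bodyFile, "-d", "-y", "-z", "GMT");
            pb.redirectOutput(csvOut);                 // mactime writes its listing to stdout
            int exit = pb.start().waitFor();
            System.out.println("mactime exited with " + exit + ", CSV at " + csvOut.getAbsolutePath());
        }
    }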