mirror of https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-14 17:06:16 +00:00

Merge branch 'develop' of https://github.com/sleuthkit/autopsy into timeline2

commit 30cc43bcc4

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2013 Basis Technology Corp.
+ * Copyright 2013-2014 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");

@@ -20,12 +20,12 @@
 package org.sleuthkit.autopsy.contentviewers;
 
 import java.awt.Component;
 import javax.swing.JTextPane;
 import org.openide.nodes.Node;
 import org.openide.util.lookup.ServiceProvider;
 import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
 
 /**
  * Shows file metadata as a list to make it easy to copy and paste.

@@ -133,18 +133,26 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer
             addRow(sb, "Name", file.getParentPath() + "/" + file.getName());
         }
 
         addRow(sb, "Size", new Long(file.getSize()).toString());
         addRow(sb, "File Name Allocation", file.getDirFlagAsString());
         addRow(sb, "Metadata Allocation", file.getMetaFlagsAsString());
 
         addRow(sb, "Modified", file.getMtimeAsDate());
         addRow(sb, "Accessed", file.getAtimeAsDate());
         addRow(sb, "Created", file.getCrtimeAsDate());
         addRow(sb, "Changed", file.getCtimeAsDate());
 
         String md5 = file.getMd5Hash();
         if (md5 == null) {
             md5 = "Not calculated";
         }
         addRow(sb, "MD5", md5);
         addRow(sb, "Hash Lookup Results", file.getKnown().toString());
 
         addRow(sb, "Internal ID", new Long(file.getId()).toString());
+        if (file.getType().compareTo(TSK_DB_FILES_TYPE_ENUM.LOCAL) == 0) {
+            addRow(sb, "Local Path", file.getLocalAbsPath());
+        }
 
         endTable(sb);
         setText(sb.toString());

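The addRow/endTable helpers used above are not part of this diff. A minimal sketch of what such helpers plausibly look like, assuming the panel renders the metadata as an HTML table inside its JTextPane — only the names and call shapes come from the hunk; the bodies here are our assumption:

    private void startTable(StringBuilder sb) {
        sb.append("<table>");
    }

    private void addRow(StringBuilder sb, String key, String value) {
        // one metadata item per HTML table row, so users can copy and paste the list
        sb.append("<tr><td>").append(key).append("</td><td>").append(value).append("</td></tr>");
    }

    private void endTable(StringBuilder sb) {
        sb.append("</table>");
    }
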
@@ -100,8 +100,13 @@ public class FileExtMismatchIngestModule extends org.sleuthkit.autopsy.ingest.In
     public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile abstractFile) {
         // skip non-files
         if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) ||
             (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
             return ProcessResult.OK;
         }
 
+        // deleted files often have content that was not theirs and therefore causes mismatches
+        if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) ||
+            (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
+            return ProcessResult.OK;
+        }

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2012 Basis Technology Corp.
+ * Copyright 2012-2013 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");

@@ -59,10 +59,8 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
             "text/javascript" //"application/xml",
             //"application/xml-dtd",
     );
-    private final TikaLanguageIdentifier tikaLanguageIdentifier;
 
     AbstractFileHtmlExtract() {
-        tikaLanguageIdentifier = new TikaLanguageIdentifier();
         this.module = KeywordSearchIngestModule.getDefault();
         ingester = Server.getIngester();
     }

@@ -166,11 +164,6 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
         totalRead = 0;
         extracted = sb.toString();
 
-
-        //attempt to identify language of extracted text and post it to the blackboard
-        tikaLanguageIdentifier.addLanguageToBlackBoard(extracted, sourceFile);
-
-
         //converts BOM automatically to charSet encoding
         byte[] encodedBytes = extracted.getBytes(outCharset);
         AbstractFileChunk chunk = new AbstractFileChunk(this, this.numChunks + 1);

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2012 Basis Technology Corp.
+ * Copyright 2012-2013 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");

@@ -39,16 +39,11 @@ import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.ReadContentInputStream;
 import org.apache.tika.Tika;
-import org.apache.tika.language.LanguageIdentifier;
 import org.apache.tika.metadata.Metadata;
 import org.apache.tika.mime.MediaType;
 import org.apache.tika.parser.ParseContext;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.coreutils.StringExtract;
 import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.TskCoreException;
 
 /**
  * Extractor of text from TIKA supported AbstractFile content. Extracted text is

@@ -75,11 +70,9 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
     private int numChunks = 0;
     //private static final String UTF16BOM = "\uFEFF"; disabled prepending of BOM
     private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor();
-    private final List<String> TIKA_SUPPORTED_TYPES = new ArrayList<String>();
-    private final TikaLanguageIdentifier tikaLanguageIdentifier;
+    private final List<String> TIKA_SUPPORTED_TYPES = new ArrayList<>();
 
     AbstractFileTikaTextExtract() {
-        tikaLanguageIdentifier = new TikaLanguageIdentifier();
         this.module = KeywordSearchIngestModule.getDefault();
         ingester = Server.getIngester();

@@ -87,7 +80,7 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
         for (MediaType mt : mediaTypes) {
             TIKA_SUPPORTED_TYPES.add(mt.getType() + "/" + mt.getSubtype());
         }
-        logger.log(Level.INFO, "Tika supported media types: " + TIKA_SUPPORTED_TYPES);
+        logger.log(Level.INFO, "Tika supported media types: {0}", TIKA_SUPPORTED_TYPES);
 
     }

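The logging change just above swaps string concatenation for java.util.logging's parameterized form. A minimal standalone illustration of the difference (class name and sample values are made up for the demo):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class LogDemo {
        private static final Logger logger = Logger.getLogger(LogDemo.class.getName());

        public static void main(String[] args) {
            java.util.List<String> types = java.util.Arrays.asList("text/html", "text/plain");
            // concatenation builds the message string even when INFO is filtered out
            logger.log(Level.INFO, "Tika supported media types: " + types);
            // the {0} placeholder defers formatting until the record is actually published
            logger.log(Level.INFO, "Tika supported media types: {0}", types);
        }
    }
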
@@ -138,13 +131,11 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
         try {
             future.get(Ingester.getTimeout(sourceFile.getSize()), TimeUnit.SECONDS);
         } catch (TimeoutException te) {
-            tika = null;
             final String msg = "Exception: Tika parse timeout for content: " + sourceFile.getId() + ", " + sourceFile.getName();
             KeywordSearch.getTikaLogger().log(Level.WARNING, msg, te);
             logger.log(Level.WARNING, msg);
             throw new IngesterException(msg);
         } catch (Exception ex) {
-            tika = null;
             final String msg = "Exception: Unexpected exception from Tika parse task execution for file: " + sourceFile.getId() + ", " + sourceFile.getName();
             KeywordSearch.getTikaLogger().log(Level.WARNING, msg, ex);
             logger.log(Level.WARNING, msg);

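The hunk above bounds the Tika parse by running it on a single-thread executor and waiting with Future.get. A minimal sketch of that pattern, with a stand-in task and a made-up timeout instead of Ingester.getTimeout(...):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    public class ParseWithTimeout {
        public static void main(String[] args) throws Exception {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            Future<String> future = executor.submit(() -> {
                Thread.sleep(100); // stand-in for a potentially slow tika.parse(...)
                return "extracted text";
            });
            try {
                // bound the wait so one pathological file cannot stall ingest
                System.out.println(future.get(5, TimeUnit.SECONDS));
            } catch (TimeoutException te) {
                future.cancel(true); // interrupt the stuck task
            } finally {
                executor.shutdown();
            }
        }
    }
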
@@ -221,9 +212,6 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract {
 
         extracted = sb.toString();
 
-        //attempt to identify language of extracted text and post it to the blackboard
-        tikaLanguageIdentifier.addLanguageToBlackBoard(extracted, sourceFile);
-
         //converts BOM automatically to charSet encoding
         byte[] encodedBytes = extracted.getBytes(OUTPUT_CHARSET);
         AbstractFileChunk chunk = new AbstractFileChunk(this, this.numChunks + 1);

@@ -91,7 +91,8 @@ public abstract class KeywordSearchListsAbstract {
         ips.add(new Keyword("(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IP_ADDRESS));
         //email
         List<Keyword> emails = new ArrayList<Keyword>();
-        emails.add(new Keyword("[A-Z0-9._%-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL));
+        emails.add(new Keyword("(?=.{8})[a-z0-9%+_-]+(?:\\.[a-z0-9%+_-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z]{2,4}(?<!\\.txt|\\.exe|\\.dll|\\.jpg|\\.xml)", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL));
+        //emails.add(new Keyword("[A-Z0-9._%-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL));
         //URL
         List<Keyword> urls = new ArrayList<Keyword>();
         //urls.add(new Keyword("http://|https://|^www\\.", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL));

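The replacement email expression above does two non-obvious things: the leading (?=.{8}) lookahead enforces a minimum match length, and the trailing lookbehind rejects hits that end like common file extensions. A small self-contained check (sample inputs are ours):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class EmailRegexDemo {
        public static void main(String[] args) {
            Pattern email = Pattern.compile(
                    "(?=.{8})[a-z0-9%+_-]+(?:\\.[a-z0-9%+_-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z]{2,4}(?<!\\.txt|\\.exe|\\.dll|\\.jpg|\\.xml)");
            for (String s : new String[]{"john.doe@example.com", "a@b.co"}) {
                Matcher m = email.matcher(s);
                // "a@b.co" is only 6 characters, so the (?=.{8}) lookahead fails everywhere
                System.out.println(s + " -> " + (m.find() ? "match: " + m.group() : "no match"));
            }
        }
    }
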
@@ -1,61 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2013 Basis Technology Corp.
- * Contact: carrier <at> sleuthkit <dot> org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.keywordsearch;
-
-import java.util.logging.Level;
-import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.TskCoreException;
-
-/**
- * TextLanguageIdentifier implementation based on a wrapped Tika
- * LanguageIdentifier
- */
-class TikaLanguageIdentifier implements TextLanguageIdentifier {
-
-    private static final Logger logger = Logger.getLogger(TikaLanguageIdentifier.class.getName());
-    private static final int MIN_STRING_LENGTH = 1000;
-
-    @Override
-    public void addLanguageToBlackBoard(String extracted, AbstractFile sourceFile) {
-        if (extracted.length() > MIN_STRING_LENGTH) {
-            org.apache.tika.language.LanguageIdentifier li = new org.apache.tika.language.LanguageIdentifier(extracted);
-
-            //logger.log(Level.INFO, sourceFile.getName() + " detected language: " + li.getLanguage()
-            //        + " with " + ((li.isReasonablyCertain()) ? "HIGH" : "LOW") + " confidence");
-
-            BlackboardArtifact genInfo;
-            try {
-                genInfo = sourceFile.getGenInfoArtifact();
-
-                BlackboardAttribute textLang = new BlackboardAttribute(
-                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEXT_LANGUAGE.getTypeID(),
-                        KeywordSearchIngestModule.MODULE_NAME, li.getLanguage());
-
-                genInfo.addAttribute(textLang);
-
-            } catch (TskCoreException ex) {
-                logger.log(Level.WARNING, "failed to add TSK_TEXT_LANGUAGE attribute to TSK_GEN_INFO artifact for file: " + sourceFile.getName(), ex);
-            }
-
-        }
-    }
-}

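For reference, the deleted class wrapped Tika 1.x's language detector. A minimal hedged sketch of that API on its own (the sample text is ours; in later Tika releases this class was deprecated in favor of the tika-langdetect module):

    import org.apache.tika.language.LanguageIdentifier;

    public class LanguageDemo {
        public static void main(String[] args) {
            // short inputs give unreliable guesses, which is why the deleted
            // class guarded on MIN_STRING_LENGTH before detecting
            LanguageIdentifier li = new LanguageIdentifier(
                    "Ceci est un exemple de texte suffisamment long pour la détection de langue.");
            System.out.println(li.getLanguage());         // ISO 639 code, e.g. "fr"
            System.out.println(li.isReasonablyCertain()); // confidence flag
        }
    }
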
Testing/test/qa-functional/src/org/sleuthkit/autopsy/testing/RegressionTest.java (12 changed lines; Normal file → Executable file)

@@ -182,7 +182,7 @@ public class RegressionTest extends TestCase {
 
     public void testConfigureHash() {
         logger.info("Hash Configure");
-        JDialog hashMainDialog = JDialogOperator.waitJDialog("Hash Database Configuration", false, false);
+        JDialog hashMainDialog = JDialogOperator.waitJDialog("Hash Set Configuration", false, false);
         JDialogOperator hashMainDialogOperator = new JDialogOperator(hashMainDialog);
         List<String> databases = new ArrayList<String>();
         databases.add(System.getProperty("nsrl_path"));

@@ -190,9 +190,9 @@ public class RegressionTest extends TestCase {
         for (String database : databases) {
             JButtonOperator importButtonOperator = new JButtonOperator(hashMainDialogOperator, "Import");
             importButtonOperator.pushNoBlock();
-            JDialog addDatabaseDialog = JDialogOperator.waitJDialog("Add Hash Database", false, false);
+            JDialog addDatabaseDialog = JDialogOperator.waitJDialog("Import Hash Database", false, false);
             JDialogOperator addDatabaseDialogOperator = new JDialogOperator(addDatabaseDialog);
-            JButtonOperator browseButtonOperator = new JButtonOperator(addDatabaseDialogOperator, "Browse", 0);
+            JButtonOperator browseButtonOperator = new JButtonOperator(addDatabaseDialogOperator, "Open...", 0);
             browseButtonOperator.pushNoBlock();
             JFileChooserOperator fileChooserOperator = new JFileChooserOperator();
             fileChooserOperator.chooseFile(database);

@@ -232,8 +232,8 @@ public class RegressionTest extends TestCase {
         jfco0.chooseFile(words);
         JTableOperator jto = new JTableOperator(jdo, 0);
         jto.clickOnCell(0, 0);
-        JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Enable for ingest", 0);
-        if (!jcbo.isSelected()) {
+        JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Use during ingest", 0);
+        if (!(jcbo.isSelected())) {
             jcbo.doClick();
         }
         new Timeout("pausing", 1000).sleep(); // give it a second to process

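These UI-test fixes all follow the same Jemmy pattern: wait for a window by its (now renamed) title, wrap it in an operator, then drive components found by their labels. A compact sketch built only from calls that appear in the hunks above:

    import javax.swing.JDialog;
    import org.netbeans.jemmy.operators.JButtonOperator;
    import org.netbeans.jemmy.operators.JDialogOperator;

    public class JemmySketch {
        static void openImportDialog() {
            // block until a dialog titled "Hash Set Configuration" exists
            JDialog dialog = JDialogOperator.waitJDialog("Hash Set Configuration", false, false);
            JDialogOperator dialogOperator = new JDialogOperator(dialog);
            // find the button by its label and click without waiting for the result
            new JButtonOperator(dialogOperator, "Import").pushNoBlock();
        }
    }
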
@@ -339,4 +339,4 @@ public class RegressionTest extends TestCase {
         KeywordSearchListsXML curr = KeywordSearchListsXML.getCurrent();
         curr.setUseForIngest("URLs", true);
     }
-}
+}

@@ -895,6 +895,11 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
         }
     }
 
+    /**
+     * Parse the output of mactime to break the results into day-sized chunks (in GMT).
+     * @param f handle to mactime CSV output
+     * @return
+     */
     private List<YearEpoch> parseMacTime(java.io.File f) {
         List<YearEpoch> years = new ArrayList<>();
         Scanner scan;

@@ -911,11 +916,15 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
         YearEpoch ye = null;
         while (scan.hasNextLine()) {
             String[] s = scan.nextLine().split(","); //1999-02-08T11:08:08Z, 78706, m..b, rrwxrwxrwx, 0, 0, 8355, /img...
 
+            // break the date into mon, day and year; note that the ISO times are in GMT
             String[] datetime = s[0].split("T"); //{1999-02-08, 11:08:08Z}
             String[] date = datetime[0].split("-"); // {1999, 02, 08}
             int year = Integer.valueOf(date[0]);
             int month = Integer.valueOf(date[1]) - 1; //Months are zero indexed: 1 = February, 6 = July, 11 = December
             int day = Integer.valueOf(date[2]); //Days are 1 indexed
 
+            // get the object id out of the modified output
             long ObjId = Long.valueOf(s[4]);
 
             // when the year changes, create and add a new YearEpoch object to the list
@@ -925,6 +934,7 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
                 prevYear = year;
             }
 
+            // save the object id along with the day
             if (ye != null) {
                 ye.add(ObjId, month, day);
             }

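The parsing logic above splits one mactime CSV row by hand. A standalone sketch of the same field handling, using the sample row quoted in the code comment (note: per that comment, Autopsy's modified mactime output carries the object id in field 4; the path here is made up):

    public class MacTimeLineDemo {
        public static void main(String[] args) {
            String line = "1999-02-08T11:08:08Z,78706,m..b,rrwxrwxrwx,0,0,8355,/img/example.txt";
            String[] s = line.split(",");
            String[] date = s[0].split("T")[0].split("-");   // {1999, 02, 08}
            int year = Integer.valueOf(date[0]);
            int month = Integer.valueOf(date[1]) - 1;        // zero-indexed, as in the code above
            int day = Integer.valueOf(date[2]);
            long objId = Long.valueOf(s[4]);                 // object id column in the modified output
            System.out.println(year + "-" + (month + 1) + "-" + day + " objId=" + objId);
        }
    }
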
@@ -1038,6 +1048,11 @@ public class Timeline extends CallableSystemAction implements Presenter.Toolbar,
         return bodyFilePath;
     }
 
+    /**
+     * Run mactime on the given body file. Generates a CSV file with ISO dates (in GMT).
+     * @param pathToBodyFile
+     * @return Path to output file.
+     */
     private String makeMacTime(String pathToBodyFile) {
         String cmdpath = "";
         String macpath = "";

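The body of makeMacTime is truncated in this view, but the javadoc describes invoking The Sleuth Kit's mactime tool. A hedged sketch of the equivalent command line (flags from the mactime man page; paths are made up):

    mactime -b /case/body.txt -d -y -z GMT > /case/mactime.csv

Here -b names the body file, -d selects comma-delimited output, -y prints ISO 8601 dates, and -z sets the display time zone, matching the "CSV file with ISO dates (in GMT)" the javadoc promises.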