Merge branch 'master' of https://github.com/sleuthkit/autopsy
commit f5ff3b45d9

.gitignore (vendored): 4 changes
@@ -46,8 +46,10 @@ genfiles.properties
/branding/nbproject/*
!/branding/nbproject/project.xml
!/branding/nbproject/project.properties
/test/input/
/test/input/*
!/test/input/notablehashes.txt-md5.idx
!/test/input/notablekeywords.xml
!/test/input/NSRL.txt-md5.idx
/test/output/*
!/test/output/gold
/test/output/gold/tmp
@@ -816,7 +816,7 @@ public class Case {
* Invoke the creation of startup dialog window.
*/
static public void invokeStartupDialog() {
StartupWindow.getInstance().open();
StartupWindowProvider.getInstance().open();
}

/**

@@ -86,7 +86,7 @@ public final class CaseCloseAction extends CallableSystemAction implements Prese
EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
StartupWindow.getInstance().open();
StartupWindowProvider.getInstance().open();
}
});
} catch (Exception ex) {

@@ -85,7 +85,7 @@ public final class CaseOpenAction implements ActionListener {
} else {
// try to close Startup window if there's one
try {
StartupWindow.getInstance().close();
StartupWindowProvider.getInstance().close();
} catch (Exception ex) {
// no need to show the error message to the user.
logger.log(Level.WARNING, "Error closing startup window.", ex);

@@ -97,7 +97,7 @@ public final class CaseOpenAction implements ActionListener {
+ ": " + ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE);
logger.log(Level.WARNING, "Error opening case in folder " + path, ex);

StartupWindow.getInstance().open();
StartupWindowProvider.getInstance().open();
}
}
}

@@ -291,7 +291,7 @@ class NewCaseWizardPanel1 implements WizardDescriptor.ValidatingPanel<WizardDesc
createdDirectory = caseDirPath;
// try to close Startup window if there's one
try {
StartupWindow.getInstance().close();
StartupWindowProvider.getInstance().close();
} catch (Exception ex) {
logger.log(Level.WARNING, "Startup window didn't close as expected.", ex);

@@ -187,7 +187,7 @@ class OpenRecentCasePanel extends javax.swing.JPanel {
if (!casePath.equals("")) {
// Close the startup menu
try {
StartupWindow.getInstance().close();
StartupWindowProvider.getInstance().close();
CueBannerPanel.closeOpenRecentCasesWindow();
} catch (Exception ex) {
logger.log(Level.WARNING, "Error: couldn't open case: " + caseName, ex);

@@ -200,7 +200,7 @@ class OpenRecentCasePanel extends javax.swing.JPanel {

//if case is not opened, open the start window
if (Case.isCaseOpen() == false) {
StartupWindow.getInstance().open();
StartupWindowProvider.getInstance().open();
}

} else {

@@ -65,7 +65,7 @@ class RecentItems implements ActionListener {

@Override
public void run() {
StartupWindow.getInstance().open();
StartupWindowProvider.getInstance().open();
}

});
@@ -26,40 +26,28 @@ import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JDialog;
import javax.swing.JFrame;
import org.openide.util.lookup.ServiceProvider;

/**
* Displays
* The default implementation of the Autopsy startup window
*/
public final class StartupWindow extends JDialog {
@ServiceProvider(service=StartupWindowInterface.class)
public final class StartupWindow extends JDialog implements StartupWindowInterface {

private static StartupWindow instance;
private static final String TITLE = "Welcome";
private static Dimension DIMENSIONS = new Dimension(750, 400);

private StartupWindow(JFrame frame, String title, boolean isModal) {
super(frame, title, isModal);
public StartupWindow() {
super(new JFrame(TITLE), TITLE, true);
init();
}

/**
* Get the startup window
* @return the startup window singleton
*/
public static synchronized StartupWindow getInstance() {
if (StartupWindow.instance == null) {
JFrame frame = new JFrame(TITLE);
boolean isModal = true;
StartupWindow.instance = new StartupWindow(frame, TITLE, isModal);
}

return instance;
}

/**
* Shows the startup window.
*/
public void init() {
private void init() {

Dimension screenDimension = Toolkit.getDefaultToolkit().getScreenSize();

@@ -88,6 +76,7 @@ public final class StartupWindow extends JDialog {

}

@Override
public void open() {
setVisible(true);
}

@@ -95,6 +84,7 @@ public final class StartupWindow extends JDialog {

/**
* Closes the startup window.
*/
@Override
public void close() {
this.setVisible(false);
}
@@ -0,0 +1,35 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule;

/**
* Interface for startup window implementations
*/
public interface StartupWindowInterface {

/**
* Shows and makes active the startup window
*/
public void open();

/**
* Closes the startup window
*/
public void close();
}
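For context, the sketch below shows how an external module could plug its own startup window into this interface. The package and class names are hypothetical; the @ServiceProvider registration and the open()/close() contract come from the diffs above, everything else is an assumption rather than part of this commit.

package org.example.customstartup; // hypothetical external-module package

import javax.swing.JDialog;
import javax.swing.JFrame;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.casemodule.StartupWindowInterface;

// Registering with Lookup is what lets StartupWindowProvider (added below)
// discover this window and prefer it over the default StartupWindow.
@ServiceProvider(service = StartupWindowInterface.class)
public final class MyCustomStartupWindow extends JDialog implements StartupWindowInterface {

    public MyCustomStartupWindow() {
        super(new JFrame("Custom Welcome"), "Custom Welcome", true);
        // build the module's own welcome UI here
    }

    @Override
    public void open() {
        setVisible(true); // show and activate the window
    }

    @Override
    public void close() {
        setVisible(false); // hide the window
    }
}

Callers then go through StartupWindowProvider.getInstance().open() / .close() rather than the concrete window class, as the hunks earlier in this commit show.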
@@ -0,0 +1,103 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule;

import java.util.Collection;
import java.util.Iterator;
import java.util.logging.Level;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.coreutils.Logger;

/**
* Provides the start up window to rest of the application. It may return the
* main / default startup window, or a custom one if it has been discovered.
*
* All that is required to create a custom startup window in a module and activate it
* is to implement StartupWindowInterface and register it with lookup as a ServiceProvider.
* The custom startup window is automatically chosen over the default one, given it is the only external module custom startup window.
*/
public class StartupWindowProvider implements StartupWindowInterface {

private static volatile StartupWindowProvider instance;
private static final Logger logger = Logger.getLogger(StartupWindowProvider.class.getName());
private volatile StartupWindowInterface startupWindowToUse;

public static StartupWindowProvider getInstance() {
if (instance == null) {
synchronized (StartupWindowProvider.class) {
if (instance == null) {
instance = new StartupWindowProvider();
instance.init();
}
}
}

return instance;
}

private void init() {
if (startupWindowToUse == null) {
//discover the registered windows
Collection<? extends StartupWindowInterface> startupWindows =
Lookup.getDefault().lookupAll(StartupWindowInterface.class);

int windowsCount = startupWindows.size();
if (windowsCount > 2) {
logger.log(Level.WARNING, "More than 2 (" + windowsCount + ") start up windows discovered, will use the first custom one");
} else if (windowsCount == 1) {
startupWindowToUse = startupWindows.iterator().next();
logger.log(Level.INFO, "Will use the default startup window: " + startupWindowToUse.toString());
} else {
//pick the non default one
Iterator<? extends StartupWindowInterface> it = startupWindows.iterator();
while (it.hasNext()) {
StartupWindowInterface window = it.next();
if (!org.sleuthkit.autopsy.casemodule.StartupWindow.class.isInstance(window)) {
startupWindowToUse = window;
logger.log(Level.INFO, "Will use the custom startup window: " + startupWindowToUse.toString());
break;

}
}

if (startupWindowToUse == null) {
logger.log(Level.SEVERE, "Unexpected error, no custom startup window found, using the default");
startupWindowToUse = new org.sleuthkit.autopsy.casemodule.StartupWindow();
}

}

}
}

@Override
public void open() {
if (startupWindowToUse != null) {
startupWindowToUse.open();
}
}

@Override
public void close() {
if (startupWindowToUse != null) {
startupWindowToUse.close();
}
}
}
@@ -124,7 +124,7 @@ public class Installer extends ModuleInstall {
javaFxInit = false;
final String msg = "Error initializing JavaFX. ";
final String details = " Some features will not be available. "
+ " Check that you have the right JRE installed (Sun JRE > 1.7.10). ";
+ " Check that you have the right JRE installed (Oracle JRE > 1.7.10). ";
logger.log(Level.SEVERE, msg
+ details, e);

@@ -578,22 +578,21 @@ class IngestScheduler {
* of skipped
* @return true if should be enqueued, false otherwise
*/
private static boolean shouldEnqueueTask(ProcessTask processTask) {
private static boolean shouldEnqueueTask(final ProcessTask processTask) {
final AbstractFile aFile = processTask.file;

//if it's unalloc file, skip if so scheduled
if (processTask.context.isProcessUnalloc() == false) {
if (aFile.isVirtual() == true) {
if (processTask.context.isProcessUnalloc() == false
&& aFile.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS //unalloc files
) ) {
return false;
}
}

String fileName = aFile.getName();
if (fileName.equals(".") || fileName.equals("..")) {
return false;
}
if (aFile.isVirtual() == false && aFile.isFile() == true
&& aFile.getType() == TSK_DB_FILES_TYPE_ENUM.FS) {
else if (aFile instanceof org.sleuthkit.datamodel.File ) {
final org.sleuthkit.datamodel.File f = (File) aFile;

//skip files in root dir, starting with $, containing : (not default attributes)
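Because the hunk above interleaves removed and added lines and is cut off, here is a condensed sketch of how the reworked check fits together. ProcessTask, AbstractFile, and TSK_DB_FILES_TYPE_ENUM are the names used in the diff; everything else, including the trailing return, is an assumption about the untouched remainder of the method.

// Sketch only, not the full method from IngestScheduler.
private static boolean shouldEnqueueTask(final ProcessTask processTask) {
    final AbstractFile aFile = processTask.file;

    // Skip unallocated blocks when the user chose not to process unallocated space.
    if (processTask.context.isProcessUnalloc() == false
            && aFile.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
        return false;
    }

    // Skip the "." and ".." directory entries.
    final String fileName = aFile.getName();
    if (fileName.equals(".") || fileName.equals("..")) {
        return false;
    }

    // ... the real method goes on to special-case regular file system files
    // (org.sleuthkit.datamodel.File), e.g. root-directory files starting with $.
    return true;
}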
@@ -23,11 +23,11 @@ import javax.swing.JPanel;
public interface GeneralReportModule extends ReportModule {

/**
* Generate the report and update the report's ProgressPanel, then save the report
* to the reportPath.
* Called to generate the report. Method is responsible for saving the file at the
* path specified and updating progress via the progressPanel object.
*
* @param reportPath path to save the report
* @param progressPanel panel to update the report's progress
* @param progressPanel panel to update the report's progress with
*/
public void generateReport(String reportPath, ReportProgressPanel progressPanel);

@@ -35,7 +35,7 @@ public interface GeneralReportModule extends ReportModule {
* Returns the configuration panel for the report, which is displayed in
* the report configuration step of the report wizard.
*
* @return the report's configuration panel
* @return Configuration panel or null if the module does not need configuration.
*/
public JPanel getConfigurationPanel();
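A hypothetical skeleton that follows the contract described in the updated javadoc: write the report to the given path, keep the progress panel updated, and return null from getConfigurationPanel() when no configuration is needed. The class name, package, and the specific ReportProgressPanel calls are assumptions, not part of this commit.

package org.example.report; // hypothetical

import java.io.FileWriter;
import java.io.IOException;
import javax.swing.JPanel;
import org.sleuthkit.autopsy.report.ReportProgressPanel; // package assumed

public class ExampleGeneralReportModule /* implements GeneralReportModule */ {

    public void generateReport(String reportPath, ReportProgressPanel progressPanel) {
        progressPanel.setIndeterminate(true); // assumed progress-panel call
        try (FileWriter out = new FileWriter(reportPath)) {
            // The module itself is responsible for saving the file at reportPath.
            out.write("example report body\n");
        } catch (IOException ex) {
            // a real module would log this and flag the failure on the panel
        }
        progressPanel.complete(); // assumed progress-panel call
    }

    public JPanel getConfigurationPanel() {
        return null; // permitted when the module needs no configuration
    }

    public String getExtension() {
        return ".txt"; // see the ReportModule hunk below: the extension used for the saved report
    }
}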
@@ -43,8 +43,7 @@ public interface ReportModule {
public String getDescription();

/**
* Calls to the report module to execute a method to get the extension that
* is used for the report
* Returns the extension that is used for the report
*
* @return String the extension the file will be saved as
*
@@ -53,7 +53,7 @@
<target name="retrieve-deps-local" description="build library dependencies that are stored local-only">
<!-- javafx: note: this is a workaround, needs to match the one from jre/jdk and ideally be automatically included -->
<!-- javafx native libs are always on runtime classpath from jre -->
<copy file="${thirdparty.dir}/jfxrt/1.7.13/jfxrt.jar" todir="release/modules/ext/" />
<copy file="${thirdparty.dir}/jfxrt/1.7.21/jfxrt.jar" todir="release/modules/ext/" />
</target>

<target name="retrieve-deps" description="retrieve dependencies using ivy" depends="init-ivy,build-native-libs">
@@ -2,9 +2,9 @@ file.reference.activation-1.1.jar=release/modules/ext/activation-1.1.jar
file.reference.ant-1.8.2.jar=release/modules/ext/ant-1.8.2.jar
file.reference.ant-launcher-1.8.2.jar=release/modules/ext/ant-launcher-1.8.2.jar
file.reference.AppleJavaExtensions-1.4.jar=release/modules/ext/AppleJavaExtensions-1.4.jar
file.reference.avalon-framework-4.1.3.jar=release/modules/ext/avalon-framework-4.1.3.jar
file.reference.commons-codec-1.5.jar=release/modules/ext/commons-codec-1.5.jar
file.reference.commons-io-2.4.jar=release/modules/ext/commons-io-2.4.jar
file.reference.commons-lang-2.6.jar=release/modules/ext/commons-lang-2.6.jar
file.reference.commons-lang3-3.0-javadoc.jar=release/modules/ext/commons-lang3-3.0-javadoc.jar
file.reference.commons-lang3-3.0-sources.jar=release/modules/ext/commons-lang3-3.0-sources.jar
file.reference.commons-lang3-3.0.jar=release/modules/ext/commons-lang3-3.0.jar

@@ -816,10 +816,6 @@
<runtime-relative-path>ext/commons-lang-2.6.jar</runtime-relative-path>
<binary-origin>release/modules/ext/commons-lang-2.6.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/avalon-framework-4.1.3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/avalon-framework-4.1.3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/javassist-3.12.1.GA.jar</runtime-relative-path>
<binary-origin>release/modules/ext/javassist-3.12.1.GA.jar</binary-origin>

@@ -828,14 +824,14 @@
<runtime-relative-path>ext/ant-launcher-1.8.2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/ant-launcher-1.8.2.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jcalendarbutton-1.4.6.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jcalendarbutton-1.4.6.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/mail-1.4.3.jar</runtime-relative-path>
<binary-origin>release/modules/ext/mail-1.4.3.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/jcalendarbutton-1.4.6.jar</runtime-relative-path>
<binary-origin>release/modules/ext/jcalendarbutton-1.4.6.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/xml-apis-1.0.b2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/xml-apis-1.0.b2.jar</binary-origin>

@@ -920,6 +916,10 @@
<runtime-relative-path>ext/poi-excelant-3.8.jar</runtime-relative-path>
<binary-origin>release/modules/ext/poi-excelant-3.8.jar</binary-origin>
</class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/avalon-framework-4.1.5.jar</runtime-relative-path>
<binary-origin>release/modules/ext/avalon-framework-4.1.5.jar</binary-origin>
</class-path-extension>
</data>
</configuration>
</project>
NEWS.txt: 1 change

@@ -7,6 +7,7 @@ Improvements:

Bugfixes:
- Keyword Search: fix when Solr does not cleanly shutdown
- fix for "Process Unallocated Space" option doesn't do anything

Binary file not shown.
@@ -26,6 +26,8 @@ import urllib2
import re
import zipfile
import zlib
import Emailer
import srcupdater

#
# Please read me...

@@ -69,6 +71,8 @@ class Args:
self.exception_string = ""
self.contin = False
self.gold_creation = False
self.daily = False
self.fr = False

def parse(self):
global nxtproc
@@ -134,6 +138,12 @@ class Args:
elif arg == "-g" or arg == "--gold":
printout("Creating gold standards")
self.gold_creation = True
elif arg == "-d" or arg == "--daily":
printout("Running daily")
self.daily = True
elif arg == "-fr" or arg == "--forcerun":
printout("Not downloading new images")
self.fr = True
else:
printout(usage())
return False

@@ -148,9 +158,9 @@ class Args:
class TestAutopsy:
def __init__(self):
# Paths:
self.input_dir = make_local_path("..","input")
self.input_dir = Emailer.make_local_path("..","input")
self.output_dir = ""
self.gold = make_local_path("..", "output", "gold", "tmp")
self.gold = Emailer.make_local_path("..", "output", "gold", "tmp")
# Logs:
self.antlog_dir = ""
self.common_log = ""

@@ -290,7 +300,8 @@ class Database:
global failedbool
global errorem
failedbool = True
errorem += "There was a difference in the number of artifacts for " + case.image + ".\n"
global imgfail
imgfail = True
return "; ".join(self.artifact_comparison)

def get_attribute_comparison(self):

@@ -299,7 +310,8 @@ class Database:
global failedbool
global errorem
failedbool = True
errorem += "There was a difference in the number of attributes for " + case.image + ".\n"
global imgfail
imgfail = True
list = []
for error in self.attribute_comparison:
list.append(error)

@@ -307,7 +319,7 @@ class Database:

def generate_autopsy_artifacts(self):
if not self.autopsy_artifacts:
autopsy_db_file = make_path(case.output_dir, case.image_name,
autopsy_db_file = Emailer.make_path(case.output_dir, case.image_name,
"AutopsyTestCase", "autopsy.db")
autopsy_con = sqlite3.connect(autopsy_db_file)
autopsy_cur = autopsy_con.cursor()

@@ -325,7 +337,7 @@ class Database:

def generate_autopsy_attributes(self):
if self.autopsy_attributes == 0:
autopsy_db_file = make_path(case.output_dir, case.image_name,
autopsy_db_file = Emailer.make_path(case.output_dir, case.image_name,
"AutopsyTestCase", "autopsy.db")
autopsy_con = sqlite3.connect(autopsy_db_file)
autopsy_cur = autopsy_con.cursor()

@@ -335,7 +347,7 @@ class Database:

def generate_autopsy_objects(self):
if self.autopsy_objects == 0:
autopsy_db_file = make_path(case.output_dir, case.image_name,
autopsy_db_file = Emailer.make_path(case.output_dir, case.image_name,
"AutopsyTestCase", "autopsy.db")
autopsy_con = sqlite3.connect(autopsy_db_file)
autopsy_cur = autopsy_con.cursor()

@@ -345,7 +357,9 @@ class Database:

def generate_gold_artifacts(self):
if not self.gold_artifacts:
gold_db_file = make_path(case.gold, case.image_name, "autopsy.db")
gold_db_file = Emailer.make_path(case.gold, case.image_name, "autopsy.db")
if(not file_exists(gold_db_file)):
gold_db_file = Emailer.make_path(case.gold_parse, case.image_name, "autopsy.db")
gold_con = sqlite3.connect(gold_db_file)
gold_cur = gold_con.cursor()
gold_cur.execute("SELECT COUNT(*) FROM blackboard_artifact_types")

@@ -361,7 +375,9 @@ class Database:

def generate_gold_attributes(self):
if self.gold_attributes == 0:
gold_db_file = make_path(case.gold, case.image_name, "autopsy.db")
gold_db_file = Emailer.make_path(case.gold, case.image_name, "autopsy.db")
if(not file_exists(gold_db_file)):
gold_db_file = Emailer.make_path(case.gold_parse, case.image_name, "autopsy.db")
gold_con = sqlite3.connect(gold_db_file)
gold_cur = gold_con.cursor()
gold_cur.execute("SELECT COUNT(*) FROM blackboard_attributes")

@@ -369,7 +385,9 @@ class Database:

def generate_gold_objects(self):
if self.gold_objects == 0:
gold_db_file = make_path(case.gold, case.image_name, "autopsy.db")
gold_db_file = Emailer.make_path(case.gold, case.image_name, "autopsy.db")
if(not file_exists(gold_db_file)):
gold_db_file = Emailer.make_path(case.gold_parse, case.image_name, "autopsy.db")
gold_con = sqlite3.connect(gold_db_file)
gold_cur = gold_con.cursor()
gold_cur.execute("SELECT COUNT(*) FROM tsk_objects")
@@ -387,23 +405,32 @@ class Database:
def run_config_test(config_file):
try:
global parsed
global errorem
global attachl
count = 0
parsed = parse(config_file)
case
counts = {}
if parsed.getElementsByTagName("indir"):
case.input_dir = parsed.getElementsByTagName("indir")[0].getAttribute("value").encode().decode("utf_8")
if parsed.getElementsByTagName("global_csv"):
case.global_csv = parsed.getElementsByTagName("global_csv")[0].getAttribute("value").encode().decode("utf_8")
case.global_csv = make_local_path(case.global_csv)

case.global_csv = Emailer.make_local_path(case.global_csv)
if parsed.getElementsByTagName("golddir"):
case.gold_parse = parsed.getElementsByTagName("golddir")[0].getAttribute("value").encode().decode("utf_8")
case.gold_parse = Emailer.make_path(case.gold_parse, "tmp")
else:
case.gold_parse = case.gold
# Generate the top navbar of the HTML for easy access to all images
values = []
for element in parsed.getElementsByTagName("image"):
value = element.getAttribute("value").encode().decode("utf_8")
if file_exists(value):
values.append(value)
else:
print("File: ", value, " doesn't exist")
count = len(values)
archives = make_path(case.gold, "..")
archives = Emailer.make_path(case.gold, "..")
arcount = 0
for file in os.listdir(archives):
if not(file == 'tmp'):

@@ -425,7 +452,8 @@ def run_config_test(config_file):
#Begin infiniloop
if(newDay()):
global daycount
compile()
setDay()
srcupdater.compile(errorem, attachl, parsed)
if(daycount > 0):
print("starting process")
outputer = open("ScriptLog.txt", "a")
@@ -449,50 +477,12 @@ def run_config_test(config_file):
printerror(str(e) + "\n")
logging.critical(traceback.format_exc())

def compile():
global redo
global tryredo
global daycount
global nxtproc
global failedbool
global errorem
global attachl
global passed
passed = True
tryredo = False
setDay()
redo = True
while(redo):
passed = True
if(passed):
gitPull("sleuthkit")
if(passed):
vsBuild()
if(passed):
gitPull("autopsy")
if(passed):
antBuild("datamodel", False)
if(passed):
antBuild("autopsy", True)
if(passed):
redo = False
else:
print("Compile Failed")
time.sleep(3600)
attachl = []
errorem = "The test standard didn't match the gold standard.\n"
failedbool = False
if(tryredo):
errorem += "Rebuilt properly.\n"
send_email()
attachl = []
errorem = "The test standard didn't match the gold standard.\n"
passed = True

# Runs the test on the single given file.
# The path must be guaranteed to be a correct path.
def run_test(image_file, count):
global parsed
global imgfail
imgfail = False
if image_type(image_file) == IMGTYPE.UNKNOWN:
printerror("Error: Image type is unrecognized:")
printerror(image_file + "\n")
@@ -502,24 +492,24 @@ def run_test(image_file, count):
case.image_file = image_file
case.image_name = case.get_image_name(image_file) + "(" + str(count) + ")"
case.image = case.get_image_name(image_file)
case.common_log_path = make_local_path(case.output_dir, case.image_name, case.image_name+case.common_log)
case.warning_log = make_local_path(case.output_dir, case.image_name, "AutopsyLogs.txt")
case.antlog_dir = make_local_path(case.output_dir, case.image_name, "antlog.txt")
case.common_log_path = Emailer.make_local_path(case.output_dir, case.image_name, case.image_name+case.common_log)
case.warning_log = Emailer.make_local_path(case.output_dir, case.image_name, "AutopsyLogs.txt")
case.antlog_dir = Emailer.make_local_path(case.output_dir, case.image_name, "antlog.txt")
if(args.list):
element = parsed.getElementsByTagName("build")
if(len(element)<=0):
toval = make_path("..", "build.xml")
toval = Emailer.make_path("..", "build.xml")
else:
element = element[0]
toval = element.getAttribute("value").encode().decode("utf_8")
if(toval==None):
toval = make_path("..", "build.xml")
toval = Emailer.make_path("..", "build.xml")
else:
toval = make_path("..", "build.xml")
toval = Emailer.make_path("..", "build.xml")
case.build_path = toval
case.known_bad_path = make_path(case.input_dir, "notablehashes.txt-md5.idx")
case.keyword_path = make_path(case.input_dir, "notablekeywords.xml")
case.nsrl_path = make_path(case.input_dir, "nsrl.txt-md5.idx")
case.known_bad_path = Emailer.make_path(case.input_dir, "notablehashes.txt-md5.idx")
case.keyword_path = Emailer.make_path(case.input_dir, "notablekeywords.xml")
case.nsrl_path = Emailer.make_path(case.input_dir, "nsrl.txt-md5.idx")

logging.debug("--------------------")
logging.debug(case.image_name)

@@ -538,7 +528,7 @@ def run_test(image_file, count):
logging.critical(traceback.format_exc())
# If NOT keeping Solr index (-k)
if not args.keep:
solr_index = make_local_path(case.output_dir, case.image_name, "AutopsyTestCase", "KeywordSearch")
solr_index = Emailer.make_local_path(case.output_dir, case.image_name, "AutopsyTestCase", "KeywordSearch")
if clear_dir(solr_index):
print_report([], "DELETE SOLR INDEX", "Solr index deleted.")
elif args.keep:

@@ -558,8 +548,12 @@ def run_test(image_file, count):
if not args.gold_creation:
try:
gold_path = case.gold
img_gold = make_path(case.gold, case.image_name)
img_archive = make_local_path("..", "output", "gold", case.image_name+"-archive.zip")
img_gold = Emailer.make_path(case.gold, case.image_name)
img_archive = Emailer.make_local_path("..", "output", "gold", case.image_name+"-archive.zip")
if(not file_exists(img_archive)):
img_archive = Emailer.make_path(case.gold_parse, "..", case.image_name+"-archive.zip")
gold_path = case.gold_parse
img_gold = Emailer.make_path(gold_path, case.image_name)
extrctr = zipfile.ZipFile(img_archive, 'r', compression=zipfile.ZIP_DEFLATED)
extrctr.extractall(gold_path)
extrctr.close

@@ -580,7 +574,7 @@ def run_test(image_file, count):
if args.rebuild or args.gold_creation:
rebuild()
# Reset the case and return the tests successfully finished
clear_dir(make_path(case.output_dir, case.image_name, "AutopsyTestCase", "ModuleOutput", "keywordsearch"))
clear_dir(Emailer.make_path(case.output_dir, case.image_name, "AutopsyTestCase", "ModuleOutput", "keywordsearch"))
case.reset()
return True
@@ -604,8 +598,8 @@ def run_ant():
case.ant.append("-Dknown_bad_path=" + case.known_bad_path)
case.ant.append("-Dkeyword_path=" + case.keyword_path)
case.ant.append("-Dnsrl_path=" + case.nsrl_path)
case.ant.append("-Dgold_path=" + make_path(case.gold))
case.ant.append("-Dout_path=" + make_local_path(case.output_dir, case.image_name))
case.ant.append("-Dgold_path=" + Emailer.make_path(case.gold))
case.ant.append("-Dout_path=" + Emailer.make_local_path(case.output_dir, case.image_name))
case.ant.append("-Dignore_unalloc=" + "%s" % args.unallocated)
case.ant.append("-Dcontin_mode=" + str(args.contin))
case.ant.append("-Dtest.timeout=" + str(case.timeout))

@@ -613,7 +607,7 @@ def run_ant():
printout("Ingesting Image:\n" + case.image_file + "\n")
printout("CMD: " + " ".join(case.ant))
printout("Starting test...\n")
antoutpth = make_local_path(case.output_dir, "antRunOutput.txt")
antoutpth = Emailer.make_local_path(case.output_dir, "antRunOutput.txt")
antout = open(antoutpth, "a")
if SYS is OS.CYGWIN:
subprocess.call(case.ant, stdout=antout)

@@ -651,33 +645,33 @@ def image_type(image_file):
def rebuild():
# Errors to print
errors = []
if(case.gold_parse == None):
case.gold_parse = case.gold
# Delete the current gold standards
gold_dir = make_path(case.gold, case.image_name)
print("here")
gold_dir = Emailer.make_path(case.gold_parse)
clear_dir(gold_dir)
print("here1")
dbinpth = make_path(case.output_dir, case.image_name, "AutopsyTestCase", "autopsy.db")
dboutpth = make_path(case.gold, case.image_name, "autopsy.db")
if not os.path.exists(case.gold):
os.makedirs(case.gold)
dbinpth = Emailer.make_path(case.output_dir, case.image_name, "AutopsyTestCase", "autopsy.db")
dboutpth = Emailer.make_path(gold_dir, "autopsy.db")
if not os.path.exists(case.gold_parse):
os.makedirs(case.gold_parse)
if not os.path.exists(gold_dir):
os.makedirs(gold_dir)
copy_file(dbinpth, dboutpth)
error_pth = make_path(case.gold, case.image_name, case.image_name+"SortedErrors.txt")
error_pth = Emailer.make_path(gold_dir, case.image_name+"SortedErrors.txt")
copy_file(case.sorted_log, error_pth)
# Rebuild the HTML report
htmlfolder = ""
for fs in os.listdir(os.path.join(os.getcwd(),case.output_dir, case.image_name, "AutopsyTestCase", "Reports")):
if os.path.isdir(os.path.join(os.getcwd(), case.output_dir, case.image_name, "AutopsyTestCase", "Reports", fs)):
htmlfolder = fs
autopsy_html_path = make_local_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", htmlfolder)
autopsy_html_path = Emailer.make_local_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", htmlfolder)

html_path = make_path(case.output_dir, case.image_name,
html_path = Emailer.make_path(case.output_dir, case.image_name,
"AutopsyTestCase", "Reports")
try:
os.makedirs(os.path.join(case.gold, case.image_name, htmlfolder))
os.makedirs(os.path.join(gold_dir, htmlfolder))
for file in os.listdir(autopsy_html_path):
html_to = make_path(case.gold, case.image_name, file.replace("HTML Report", "Report"))
html_to = Emailer.make_path(gold_dir, file.replace("HTML Report", "Report"))
copy_dir(get_file_in_dir(autopsy_html_path, file), html_to)
except FileNotFoundException as e:
errors.append(e.error)
@@ -685,11 +679,10 @@ def rebuild():
errors.append("Error: Unknown fatal error when rebuilding the gold html report.")
errors.append(str(e) + "\n")
oldcwd = os.getcwd()
zpdir = case.gold
zpdir = case.gold_parse
os.chdir(zpdir)
img_gold = case.image_name
print(img_gold)
img_archive = make_path("..", case.image_name+"-archive.zip")
img_archive = Emailer.make_path("..", case.image_name+"-archive.zip")
comprssr = zipfile.ZipFile(img_archive, 'w',compression=zipfile.ZIP_DEFLATED)
zipdir(img_gold, comprssr)
comprssr.close()

@@ -709,8 +702,10 @@ def zipdir(path, zip):
# from queries while comparing
def compare_to_gold_db():
# SQLITE needs unix style pathing
gold_db_file = make_path(case.gold, case.image_name, "autopsy.db")
autopsy_db_file = make_path(case.output_dir, case.image_name,
gold_db_file = Emailer.make_path(case.gold, case.image_name, "autopsy.db")
if(not file_exists(gold_db_file)):
gold_db_file = Emailer.make_path(case.gold_parse, case.image_name, "autopsy.db")
autopsy_db_file = Emailer.make_path(case.output_dir, case.image_name,
"AutopsyTestCase", "autopsy.db")
# Try to query the databases. Ignore any exceptions, the function will
# return an error later on if these do fail

@@ -764,12 +759,14 @@ def compare_to_gold_db():
# Using the global case's variables, compare the html report file made by
# the regression test against the gold standard html report
def compare_to_gold_html():
gold_html_file = make_path(case.gold, case.image_name, "Report", "index.html")
gold_html_file = Emailer.make_path(case.gold, case.image_name, "Report", "index.html")
if(not file_exists(gold_html_file)):
gold_html_file = Emailer.make_path(case.gold_parse, case.image_name, "Report", "index.html")
htmlfolder = ""
for fs in os.listdir(make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports")):
if os.path.isdir(make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", fs)):
for fs in os.listdir(Emailer.make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports")):
if os.path.isdir(Emailer.make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", fs)):
htmlfolder = fs
autopsy_html_path = make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", htmlfolder, "HTML Report") #, "AutopsyTestCase", "Reports", htmlfolder)
autopsy_html_path = Emailer.make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", htmlfolder, "HTML Report") #, "AutopsyTestCase", "Reports", htmlfolder)

try:

@@ -784,14 +781,20 @@ def compare_to_gold_html():
return
#Find all gold .html files belonging to this case
ListGoldHTML = []
for fs in os.listdir(make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", htmlfolder)):
for fs in os.listdir(Emailer.make_path(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", htmlfolder)):
if(fs.endswith(".html")):
ListGoldHTML.append(os.path.join(case.output_dir, case.image_name, "AutopsyTestCase", "Reports", htmlfolder, fs))
#Find all new .html files belonging to this case
ListNewHTML = []
for fs in os.listdir(make_path(case.gold, case.image_name)):
if (fs.endswith(".html")):
ListNewHTML.append(make_path(case.gold, case.image_name, fs))
if(os.path.exists(Emailer.make_path(case.gold, case.image_name))):
for fs in os.listdir(Emailer.make_path(case.gold, case.image_name)):
if (fs.endswith(".html")):
ListNewHTML.append(Emailer.make_path(case.gold, case.image_name, fs))
if(not case.gold_parse == None or case.gold == case.gold_parse):
if(file_exists(Emailer.make_path(case.gold_parse, case.image_name))):
for fs in os.listdir(Emailer.make_path(case.gold_parse, case.image_name)):
if (fs.endswith(".html")):
ListNewHTML.append(Emailer.make_path(case.gold_parse, case.image_name, fs))
#ensure both reports have the same number of files and are in the same order
if(len(ListGoldHTML) != len(ListNewHTML)):
printerror("The reports did not have the same number of files. One of the reports may have been corrupted")

@@ -825,18 +828,21 @@ def compare_to_gold_html():
def compare_bb_artifacts():
exceptions = []
try:
global failedbool
global errorem
if database.gold_artifacts != database.autopsy_artifacts:
failedbool = True
global imgfail
imgfail = True
errorem += "There was a difference in the number of artifacts for " + case.image + ".\n"
for type_id in range(1, 13):
if database.gold_artifacts != database.autopsy_artifacts:
if database.gold_artifacts[type_id] != database.autopsy_artifacts[type_id]:
error = str("Artifact counts do not match for type id %d. " % type_id)
error += str("Gold: %d, Test: %d" %
(database.gold_artifacts[type_id],
database.autopsy_artifacts[type_id]))
exceptions.append(error)
global failedbool
global errorem
failedbool = True
errorem += "There was a difference in the number of artifacts for " + case.image + ".\n"
return exceptions
return exceptions
except Exception as e:
exceptions.append("Error: Unable to compare blackboard_artifacts.\n")
return exceptions
@@ -853,6 +859,8 @@ def compare_bb_attributes():
global failedbool
global errorem
failedbool = True
global imgfail
imgfail = True
errorem += "There was a difference in the number of attributes for " + case.image + ".\n"
return exceptions
except Exception as e:

@@ -871,6 +879,8 @@ def compare_tsk_objects():
global failedbool
global errorem
failedbool = True
global imgfail
imgfail = True
errorem += "There was a difference between the tsk object counts for " + case.image + " .\n"
return exceptions
except Exception as e:

@@ -887,32 +897,30 @@ def compare_tsk_objects():
# from each log file generated by Autopsy
def generate_common_log():
try:
logs_path = make_local_path(case.output_dir, case.image_name, "logs")
logs_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs")
common_log = codecs.open(case.common_log_path, "w", "utf_8")
warning_log = codecs.open(case.warning_log, "w", "utf_8")
common_log.write("--------------------------------------------------\n")
common_log.write(case.image_name + "\n")
common_log.write("--------------------------------------------------\n")
rep_path = make_local_path(case.output_dir)
rep_path = Emailer.make_local_path(case.output_dir)
rep_path = rep_path.replace("\\\\", "\\")
for file in os.listdir(logs_path):
log = codecs.open(make_path(logs_path, file), "r", "utf_8")
log = codecs.open(Emailer.make_path(logs_path, file), "r", "utf_8")
for line in log:
line = line.replace(rep_path, "CASE")
if line.startswith("Exception"):
common_log.write("From " + file +":\n" + line + "\n")
elif line.startswith("WARNING"):
common_log.write("From " + file +":\n" + line + "\n")
common_log.write(file +": " + line)
elif line.startswith("Error"):
common_log.write("From " + file +":\n" + line + "\n")
common_log.write(file +": " + line)
elif line.startswith("SEVERE"):
common_log.write("From " + file +":\n" + line + "\n")
common_log.write(file +":" + line)
else:
warning_log.write("From " + file +":\n" + line + "\n")
warning_log.write(file +": " + line)
log.close()
common_log.write("\n\n")
common_log.write("\n")
common_log.close()
case.sorted_log = make_local_path(case.output_dir, case.image_name, case.image_name + "SortedErrors.txt")
case.sorted_log = Emailer.make_local_path(case.output_dir, case.image_name, case.image_name + "SortedErrors.txt")
srtcmdlst = ["sort", case.common_log_path, "-o", case.sorted_log]
subprocess.call(srtcmdlst)
except Exception as e:
@@ -920,31 +928,36 @@ def generate_common_log():
printerror(str(e) + "\n")
logging.critical(traceback.format_exc())

def compare_errors():
gold_dir = make_path(case.gold, case.image_name, case.image_name + "SortedErrors.txt")
def compare_errors():
gold_dir = Emailer.make_path(case.gold, case.image_name, case.image_name + "SortedErrors.txt")
if(not file_exists(gold_dir)):
gold_dir = Emailer.make_path(case.gold_parse, case.image_name, case.image_name + "SortedErrors.txt")
common_log = codecs.open(case.sorted_log, "r", "utf_8")
gold_log = codecs.open(gold_dir, "r", "utf_8")
gold_dat = gold_log.read()
common_dat = common_log.read()
patrn = re.compile("\d")
if (not((re.sub(patrn, 'd', gold_dat)) == (re.sub(patrn, 'd', common_dat)))):
diff_dir = make_local_path(case.output_dir, case.image_name, "ErrorDiff.txt")
diff_dir = Emailer.make_local_path(case.output_dir, case.image_name, case.image_name+"_AutopsyErrors-Diff.txt")
diff_file = open(diff_dir, "w")
dffcmdlst = ["diff", case.sorted_log, gold_dir]
subprocess.call(dffcmdlst, stdout = diff_file)
global attachl
global errorem
global failedbool
attachl.append(case.sorted_log)
attachl.append(diff_dir)
errorem += "There was a difference in the exceptions Log.\n"
errorem += "There was a difference in the exceptions Log for " + case.image_name + ".\n"
print("Exceptions didn't match.\n")
failedbool = True
global imgfail
imgfail = True

# Fill in the global case's variables that require the log files
def fill_case_data():
try:
# Open autopsy.log.0
log_path = make_local_path(case.output_dir, case.image_name, "logs", "autopsy.log.0")
log_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs", "autopsy.log.0")
log = open(log_path)

# Set the case starting time based off the first line of autopsy.log.0

@@ -1047,10 +1060,10 @@ def generate_csv(csv_path):
vars.append( str(database.autopsy_objects) )
vars.append( str(database.get_artifacts_count()) )
vars.append( str(database.autopsy_attributes) )
vars.append( make_local_path("gold", case.image_name, "autopsy.db") )
vars.append( Emailer.make_local_path("gold", case.image_name, "autopsy.db") )
vars.append( database.get_artifact_comparison() )
vars.append( database.get_attribute_comparison() )
vars.append( make_local_path("gold", case.image_name, "standard.html") )
vars.append( Emailer.make_local_path("gold", case.image_name, "standard.html") )
vars.append( str(case.report_passed) )
vars.append( case.ant_to_string() )
@@ -1108,11 +1121,11 @@ def csv_header(csv_path):
# Returns a list of all the exceptions listed in all the autopsy logs
def get_exceptions():
exceptions = []
logs_path = make_local_path(case.output_dir, case.image_name, "logs")
logs_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs")
results = []
for file in os.listdir(logs_path):
if "autopsy.log" in file:
log = codecs.open(make_path(logs_path, file), "r", "utf_8")
log = codecs.open(Emailer.make_path(logs_path, file), "r", "utf_8")
ex = re.compile("\SException")
er = re.compile("\SError")
for line in log:

@@ -1143,10 +1156,10 @@ def report_all_errors():
# Searched all the known logs for the given regex
# The function expects regex = re.compile(...)
def regex_search_logs(regex):
logs_path = make_local_path(case.output_dir, case.image_name, "logs")
logs_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs")
results = []
for file in os.listdir(logs_path):
log = codecs.open(make_path(logs_path, file), "r", "utf_8")
log = codecs.open(Emailer.make_path(logs_path, file), "r", "utf_8")
for line in log:
if regex.search(line):
results.append(line)

@@ -1157,10 +1170,10 @@ def regex_search_logs(regex):
# Search through all the known log files for a specific string.
# Returns a list of all lines with that string
def search_logs(string):
logs_path = make_local_path(case.output_dir, case.image_name, "logs")
logs_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs")
results = []
for file in os.listdir(logs_path):
log = codecs.open(make_path(logs_path, file), "r", "utf_8")
log = codecs.open(Emailer.make_path(logs_path, file), "r", "utf_8")
for line in log:
if string in line:
results.append(line)

@@ -1180,7 +1193,7 @@ def search_common_log(string):
# Searches the given log for the given string
# Returns a list of all lines with that string
def search_log(log, string):
logs_path = make_local_path(case.output_dir, case.image_name, "logs", log)
logs_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs", log)
try:
results = []
log = codecs.open(logs_path, "r", "utf_8")

@@ -1196,11 +1209,11 @@ def search_log(log, string):
# Search through all the logs of the given type
# Types include autopsy, tika, and solr
def search_log_set(type, string):
logs_path = make_local_path(case.output_dir, case.image_name, "logs")
logs_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs")
results = []
for file in os.listdir(logs_path):
if type in file:
log = codecs.open(make_path(logs_path, file), "r", "utf_8")
log = codecs.open(Emailer.make_path(logs_path, file), "r", "utf_8")
for line in log:
if string in line:
results.append(line)
@@ -1240,6 +1253,7 @@ def printout(string):
def generate_html():
# If the file doesn't exist yet, this is the first case to run for
# this test, so we need to make the start of the html log
global imgfail
if not file_exists(case.html_log):
write_html_head()
try:

@@ -1254,9 +1268,12 @@ def generate_html():
<a href='#" + case.image_name + "-general'>General Output</a> |\
<a href='#" + case.image_name + "-logs'>Logs</a>\
</h2>"

# The script errors found
errors = "<div id='errors'>\
if imgfail:
ids = 'errors1'
else:
ids = 'errors'
errors = "<div id='" + ids + "'>\
<h2><a name='" + case.image_name + "-errors'>Errors and Warnings</a></h2>\
<hr color='#FF0000'>"
# For each error we have logged in the case

@@ -1271,16 +1288,16 @@ def generate_html():
# Links to the logs
logs = "<div id='logs'>\
<h2><a name='" + case.image_name + "-logs'>Logs</a></h2>\
<hr color='#00a00f'>"
logs_path = make_local_path(case.output_dir, case.image_name, "logs")
<hr color='#282828'>"
logs_path = Emailer.make_local_path(case.output_dir, case.image_name, "logs")
for file in os.listdir(logs_path):
logs += "<p><a href='file:\\" + make_path(logs_path, file) + "' target='_blank'>" + file + "</a></p>"
logs += "<p><a href='file:\\" + Emailer.make_path(logs_path, file) + "' target='_blank'>" + file + "</a></p>"
logs += "</div>"

# All the testing information
info = "<div id='info'>\
<h2><a name='" + case.image_name + "-info'>Information</a></h2>\
<hr color='#0005FF'>\
<hr color='#282828'>\
<table cellspacing='5px'>"
# The individual elements
info += "<tr><td>Image Path:</td>"

@@ -1369,13 +1386,14 @@ def write_html_head():
h1 span { font-size: 12px; font-weight: 100; }\
h2 { font-family: Tahoma; padding: 0px; margin: 0px; }\
hr { width: 100%; height: 1px; border: none; margin-top: 10px; margin-bottom: 10px; }\
#errors { background: #FFCFCF; border: 1px solid #FF0000; color: #FF0000; padding: 10px; margin: 20px; }\
#info { background: #D2D3FF; border: 1px solid #0005FF; color: #0005FF; padding: 10px; margin: 20px; }\
#errors { background: #CCCCCC; border: 1px solid #282828; color: #282828; padding: 10px; margin: 20px; }\
#errors1 { background: #CC0000; border: 1px solid #282828; color: #282828; padding: 10px; margin: 20px; }\
#info { background: #CCCCCC; border: 1px solid #282828; color: #282828; padding: 10px; margin: 20px; }\
#general { background: #CCCCCC; border: 1px solid #282828; color: #282828; padding: 10px; margin: 20px; }\
#logs { background: #8cff97; border: 1px solid #00820c; color: #00820c; padding: 10px; margin: 20px; }\
#logs { background: #CCCCCC; border: 1px solid #282828; color: #282828; padding: 10px; margin: 20px; }\
#errors p, #info p, #general p, #logs p { pading: 0px; margin: 0px; margin-left: 5px; }\
#info table td { color: #0005FF; font-size: 12px; min-width: 225px; }\
#logs a { color: #00820c; }\
#info table td { color: ##282828; font-size: 12px; min-width: 225px; }\
#logs a { color: ##282828; }\
</style>\
<body>"
html.write(head)

@@ -1398,7 +1416,7 @@ def html_add_images(full_image_names):
links = []
for full_name in full_image_names:
name = case.get_image_name(full_name)
links.append("<a href='#" + name + "'>" + name + "</a>")
links.append("<a href='#" + name + "(0)'>" + name + "</a>")
html.write("<p align='center'>" + (" | ".join(links)) + "</p>")
@@ -1420,100 +1438,6 @@ def getDay():
def newDay():
return getLastDay() != getDay()

#Pulls from git
def gitPull(TskOrAutopsy):
global SYS
global errorem
global attachl
ccwd = ""
gppth = make_local_path(case.output_dir, "GitPullOutput" + TskOrAutopsy + ".txt")
attachl.append(gppth)
gpout = open(gppth, 'a')
toPull = "http://www.github.com/sleuthkit/" + TskOrAutopsy
call = ["git", "pull", toPull]
if TskOrAutopsy == "sleuthkit":
ccwd = os.path.join("..", "..", "..", "sleuthkit")
else:
ccwd = os.path.join("..", "..")
subprocess.call(call, stdout=gpout, cwd=ccwd)
gpout.close()

#Builds TSK as a win32 application
def vsBuild():
global redo
global tryredo
global passed
#Please ensure that the current working directory is $autopsy/testing/script
vs = []
vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe")
vs.append(os.path.join("..", "..", "..","sleuthkit", "win32", "Tsk-win.sln"))
vs.append("/p:configuration=release")
vs.append("/p:platform=win32")
vs.append("/t:clean")
vs.append("/t:rebuild")
print(vs)
VSpth = make_local_path(case.output_dir, "VSOutput.txt")
VSout = open(VSpth, 'a')
subprocess.call(vs, stdout=VSout)
VSout.close()
chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll")
try:
open(chk)
except IOError as e:
global errorem
global attachl
if(not tryredo):
errorem += "LIBTSK C++ failed to build.\n"
attachl.append(VSpth)
send_email()
tryredo = True
passed = False
redo = True

#Builds Autopsy or the Datamodel
def antBuild(which, Build):
global redo
global passed
global tryredo
directory = os.path.join("..", "..")
ant = []
if which == "datamodel":
directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java")
ant.append("ant")
ant.append("-f")
ant.append(directory)
ant.append("clean")
if(Build):
ant.append("build")
else:
ant.append("dist")
antpth = make_local_path(case.output_dir, "ant" + which + "Output.txt")
antout = open(antpth, 'a')
succd = subprocess.call(ant, stdout=antout)
antout.close()
global errorem
global attachl
if which == "datamodel":
chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar")
try:
open(chk)
except IOError as e:
if(not tryredo):
errorem += "DataModel Java build failed.\n"
attachl.append(antpth)
send_email()
passed = False
tryredo = True
elif (succd != 0 and (not tryredo)):
errorem += "Autopsy build failed.\n"
attachl.append(antpth)
send_email()
tryredo = True
elif (succd != 0):
passed = False

#Watches clock and waits for current ingest to be done

@@ -1533,39 +1457,11 @@ def dir_exists(dir):
except:
return False

# Returns a Windows style path starting with the cwd and
# ending with the list of directories given
def make_local_path(*dirs):
path = wgetcwd()
for dir in dirs:
path += ("\\" + dir)
return path_fix(path)

# Returns a Windows style path based only off the given directories
def make_path(*dirs):
path = dirs[0]
for dir in dirs[1:]:
path += ("\\" + dir)
return path_fix(path)

# Fix a standard os.path by making it Windows format
def path_fix(path):
return path.replace("/", "\\")

# Gets the true current working directory instead of Cygwin's
def wgetcwd():
if SYS is OS.CYGWIN:
proc = subprocess.Popen(("cygpath", "-m", os.getcwd()), stdout=subprocess.PIPE)
out,err = proc.communicate()
return out.rstrip()
elif SYS is OS.WIN:
return os.getcwd()

# Copy the log files from Autopsy's default directory
def copy_logs():
try:
log_dir = os.path.join("..", "..", "Testing","build","test","qa-functional","work","userdir0","var","log")
shutil.copytree(log_dir, make_local_path(case.output_dir, case.image_name, "logs"))
shutil.copytree(log_dir, Emailer.make_local_path(case.output_dir, case.image_name, "logs"))
except Exception as e:
printerror("Error: Failed to copy the logs.")
printerror(str(e) + "\n")
@@ -1595,7 +1491,6 @@ def del_dir(dir):
def copy_file(ffrom, to):
try :
if not file_exists(ffrom):
print("hi")
raise FileNotFoundException(ffrom)
shutil.copy(ffrom, to)
except:

@@ -1614,7 +1509,7 @@ def get_file_in_dir(dir, ext):
try:
for file in os.listdir(dir):
if file.endswith(ext):
return make_path(dir, file)
return Emailer.make_path(dir, file)
# If nothing has been found, raise an exception
raise FileNotFoundException(dir)
except:

@@ -1625,7 +1520,7 @@ def find_file_in_dir(dir, name, ext):
for file in os.listdir(dir):
if file.startswith(name):
if file.endswith(ext):
return make_path(dir, file)
return Emailer.make_path(dir, file)
raise FileNotFoundException(dir)
except:
raise DirNotFoundException(dir)

@@ -1698,6 +1593,7 @@ Options:
-e ex Prints out all errors containing ex.
-l cfg Runs from configuration file cfg.
-c Runs in a loop over the configuration file until canceled. Must be used in conjunction with -l
-fr Will not try download gold standard images
"""
@ -1742,14 +1638,18 @@ class DirNotFoundException(Exception):

#Executes the tests, makes continuous testing easier
def execute_test():
global parsed
global errorem
global failedbool
global html
global attachl
case.output_dir = make_path("..", "output", "results", time.strftime("%Y.%m.%d-%H.%M.%S"))
if(not dir_exists(Emailer.make_path("..", "output", "results"))):
os.makedirs(Emailer.make_path("..", "output", "results",))
case.output_dir = Emailer.make_path("..", "output", "results", time.strftime("%Y.%m.%d-%H.%M.%S"))
os.makedirs(case.output_dir)
case.common_log = "AutopsyErrors.txt"
case.csv = make_local_path(case.output_dir, "CSV.txt")
case.html_log = make_local_path(case.output_dir, "AutopsyTestCase.html")
case.csv = Emailer.make_local_path(case.output_dir, "CSV.txt")
case.html_log = Emailer.make_local_path(case.output_dir, "AutopsyTestCase.html")
log_name = case.output_dir + "\\regression.log"
logging.basicConfig(filename=log_name, level=logging.DEBUG)
# If user wants to do a single file and a list (contradictory?)
@ -1772,7 +1672,7 @@ def execute_test():
run_test(args.single_file, 0)
# If user has not selected a single file, and does not want to ignore
# the input directory, continue on to parsing ../input
if (not args.single) and (not args.ignore):
if (not args.single) and (not args.ignore) and (not args.list):
args.config_file = "config.xml"
if not file_exists(args.config_file):
printerror("Error: Configuration file does not exist at:")
@ -1784,66 +1684,31 @@ def execute_test():
logres = search_common_log("TskCoreException")
if (len(logres)>0):
failedbool = True
global imgfail
imgfail = True
global errorem
errorem += "There were Autopsy errors.\n"
errorem += "Autopsy Nightly test failed.\n"
passFail = False
for lm in logres:
errorem += lm
html.close()
if failedbool:
passFail = False
attachl.append(case.common_log_path)
attachl.insert(0, html.name)
else:
errorem = ""
errorem += "There were no Errors.\n"
errorem += "Autopsy Nightly test passed.\n"
passFail = True
attachl = []
if not args.gold_creation:
send_email()


def send_email():
global parsed
global errorem
global attachl
global html
if(not args.list):
sys.exit()
element = parsed.getElementsByTagName("email")
if(len(element)<=0):
return
element = element[0]
toval = element.getAttribute("value").encode().decode("utf_8")
if(toval==None):
return
element = parsed.getElementsByTagName("mail_server")[0]
serverval = element.getAttribute("value").encode().decode("utf_8")
# Create the container (outer) email message.
msg = MIMEMultipart()
msg['Subject'] = 'Email Test'
# me == the sender's email address
# family = the list of all recipients' email addresses
msg['From'] = 'AutopsyContinuousTest'
msg['To'] = toval
msg.preamble = 'This is a test'
container = MIMEText(errorem, 'plain')
msg.attach(container)
Build_email(msg)
s = smtplib.SMTP(serverval)
s.sendmail(msg['From'], msg['To'], msg.as_string())
s.quit()

def Build_email(msg):
global attachl
for file in attachl:
part = MIMEBase('application', "octet-stream")
atach = open(file, "rb")
attch = atach.read()
noml = file.split("\\")
nom = noml[len(noml)-1]
part.set_payload(attch)
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="' + nom + '"')
msg.attach(part)
Emailer.send_email(parsed, errorem, attachl, passFail)

def secs_till_tommorow():
seconds = (23*3600)-(int(strftime("%H", localtime()))*3600)
seconds += (59*60)-(int(strftime("%M", localtime()))*60)
seconds += 60-(int(strftime("%S", localtime())))
return seconds+5
#----------------------#
# Main #
#----------------------#
@ -1860,17 +1725,6 @@ def main():
global daycount
global redo
global passed
inpvar = raw_input("Your input images may be out of date, do you want to update?(y/n): ")
if(inpvar.lower() == 'y' or inpvar.lower() == 'yes'):
antin = ["ant"]
antin.append("-f")
antin.append(os.path.join("..","..","build.xml"))
antin.append("test-download-imgs")
if SYS is OS.CYGWIN:
subprocess.call(antin)
elif SYS is OS.WIN:
theproc = subprocess.Popen(antin, shell = True, stdout=subprocess.PIPE)
theproc.communicate()
daycount = 0
failedbool = False
redo = False
@ -1884,19 +1738,29 @@ def main():
# The arguments were given wrong:
if not args.parse():
case.reset()
pass
return
if(not args.fr):
antin = ["ant"]
antin.append("-f")
antin.append(os.path.join("..","..","build.xml"))
antin.append("test-download-imgs")
if SYS is OS.CYGWIN:
subprocess.call(antin)
elif SYS is OS.WIN:
theproc = subprocess.Popen(antin, shell = True, stdout=subprocess.PIPE)
theproc.communicate()
# Otherwise test away!
else:
execute_test()
if(args.daily and args.contin):
time.sleep(secs_till_tommorow())
while args.contin:
redo = False
attachl = []
errorem = "The test standard didn't match the gold standard.\n"
failedbool = False
passed = False
execute_test()
while args.contin:
redo = False
attachl = []
errorem = "The test standard didn't match the gold standard.\n"
failedbool = False
passed = False
execute_test()
case = TestAutopsy()

case = TestAutopsy()

class OS:
LINUX, MAC, WIN, CYGWIN = range(4)

@ -302,6 +302,7 @@ public class RegressionTest extends TestCase{
JDialog previewDialog = JDialogOperator.waitJDialog("Progress", false, false);
screenshot("Progress");
JDialogOperator previewDialogOperator = new JDialogOperator(previewDialog);
JLabel waiter = JLabelOperator.waitJLabel(previewDialog, "Complete", false, false);
JButtonOperator jbo2 = new JButtonOperator(previewDialogOperator, "Close");
jbo2.pushNoBlock();
new Timeout("pausing", 3000).sleep(); // Give the program a second to idle to be safe
@ -38,16 +38,16 @@ If you implement GeneralReportModule, the overriden methods will be:
- org.sleuthkit.autopsy.report.GeneralReportModule::generateReport(String reportPath, ReportProgressPanel progressPanel)
- org.sleuthkit.autopsy.report.GeneralReportModule::getConfigurationPanel()

For general report modules, Autopsy will simply call the generateReport(String reportPath, ReportProgressPanel progressPanel) method and leave it up to the module to acquire and report data in its desired format. The only requirements are that the module saves to the given report path and updates the org.sleuthkit.autopsy.report.ReportProgressPanel as the report progresses.
For general report modules, Autopsy will simply call the generateReport(String reportPath, ReportProgressPanel progressPanel) method and leave it up to the module to acquire and report data in its desired format. The only requirements are that the module saves to the given report path and updates the org.sleuthkit.autopsy.report.ReportProgressPanel as the report progresses.

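To make that contract concrete, a minimal skeleton might look like the sketch below. It is only an illustration: the class name SampleReportModule, the writeReport helper, and the assumption that getConfigurationPanel() returns a javax.swing.JPanel are not taken from this commit and should be checked against the actual GeneralReportModule interface.

\code
public class SampleReportModule implements GeneralReportModule {

    @Override
    public void generateReport(String reportPath, ReportProgressPanel progressPanel) {
        // Acquire the data (for example from the blackboard), write it under
        // reportPath, and keep progressPanel updated as the report progresses.
        // writeReport() is a hypothetical helper, not part of the Autopsy API.
        writeReport(reportPath);
    }

    @Override
    public JPanel getConfigurationPanel() {
        // This sketch offers no custom options, so no configuration panel is provided.
        return null;
    }
}
\endcode
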
When updating the progress panel, it is recommened to update it as infrequently as possible, while still keeping the user informed. If your report processes 100,000 files and you chose to update the UI each time a file is reviewed, the UI would freeze when trying to process all your requests. This would cause problems to not only your reporting module, but to other modules running in parellel. A safer approach would be to update the UI every 1,000 files, or when a certain "category" of the files being processed has changed. For example, the HTML report module increments the progress bar and changes the processing label every time a new Blackboard Artifact Type is being processed.
When updating the progress panel, it is recommended to update it as infrequently as possible, while still keeping the user informed. If your report processes 100,000 files and you chose to update the UI each time a file is reviewed, the UI would freeze when trying to process all your requests. This would cause problems to not only your reporting module, but to other modules running in parallel. A safer approach would be to update the UI every 1,000 files, or when a certain "category" of the files being processed has changed. For example, the HTML report module increments the progress bar and changes the processing label every time a new Blackboard Artifact Type is being processed.

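A report loop that follows the every-1,000-files suggestion could be structured roughly as below. This is a sketch only: filesToReport and addFileToReport are placeholders for the module's own data and formatting code, and progressPanel.increment() and progressPanel.updateStatusLabel() stand in for whatever progress-bar and label update methods ReportProgressPanel actually exposes.

\code
int processed = 0;
for (AbstractFile file : filesToReport) {
    addFileToReport(file);   // hypothetical helper that writes one entry of the report
    processed++;
    // Touch the UI only every 1,000 files to avoid flooding the event queue.
    if (processed % 1000 == 0) {
        progressPanel.increment();
        progressPanel.updateStatusLabel("Processed " + processed + " files");
    }
}
\endcode
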
Autopsy will also display the panel returned by getConfigurationPanel() in the generate report wizard, when that particular report module is selected. If null is returned, a blank panel will be displayed instead. This panel can be used to allow the user custom controls over the report.
Autopsy will also display the panel returned by getConfigurationPanel() in the generate report wizard. This panel can be used to allow the user custom controls over the report.

Typically a general report module should interact with both the Blackboard API in the org.sleuthkit.datamodel.SleuthkitCase class, in addition to an API (possibly external/thirdparty) to convert Blackboard Artifacts to the desired reporting format.

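As a rough sketch of that interaction, assuming the module runs while a case is open and that the datamodel calls below match the Java bindings (verify them against org.sleuthkit.datamodel before relying on this), the body of generateReport might pull artifacts like so:

\code
try {
    SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
    // Pull one artifact type from the blackboard and hand each artifact to a
    // converter that emits it in the report's output format.
    for (BlackboardArtifact artifact : skCase.getBlackboardArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)) {
        convertArtifact(artifact, reportPath);  // placeholder for the module's own formatting code
    }
} catch (TskCoreException ex) {
    // Log the failure; a reporting error should not take down the application.
}
\endcode
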
\subsection report_create_module_layer Registering the Report in layer.xml
Lastly, it is important to register each report module, regardless of the type, to a layer.xml file. This file serves as a globally excessible instance of the report module, and allows all report modules to be recognized abstractly without knowing each class. Without this file, Autopsy will be unable to see your report module.
Lastly, it is important to register each report module, regardless of the type, to a layer.xml file. This file allows Autopsy to find the report module.

An example entry into layer.xml is shown below:
\code
6
test/input/NSRL.txt-md5.idx
Normal file
@ -0,0 +1,6 @@
00000000000000000000000000000000000000000|md5sum
00000000000000000000000000000000000000001|NSRLcreator.txt
0D4A1C9ED5A49CAF22FD5F52C666DE2C|0000000000000045
35BA15EC1C3CF03531282147DB10A089|0000000000000100
91C66396EEC4BCEEAF6EDE7A48F60C63|0000000000000152
A99F69068F958CF412E7F8B8A0142B41|0000000000000000
6
test/input/notablehashes.txt-md5.idx
Normal file
@ -0,0 +1,6 @@
00000000000000000000000000000000000000000|md5sum
00000000000000000000000000000000000000001|notablehashescreator.txt
48199F51973F317459E80E18DC744B12|0000000000000000
5CCB10AEA1EC335139715D4AA44D0EE0|0000000000000062
94610B03A4295300AA29C3364DB18683|0000000000000108
A06B65C36E0E5A8229749375C3AAC4B1|0000000000000160
15
test/input/notablekeywords.xml
Normal file
@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<keyword_lists>
<keyword_list created="2012-03-23 11:06:17" modified="2012-03-23 11:08:48" name="notable_keywords" use_for_ingest="true">
<keyword literal="true">Jean</keyword>
<keyword literal="true">Personalized</keyword>
<keyword literal="true">DIRT</keyword>
<keyword literal="true">Osama</keyword>
<keyword literal="true">bomb</keyword>
<keyword literal="true">hacking</keyword>
<keyword literal="true">molotov</keyword>
<keyword literal="true">nuclear</keyword>
<keyword literal="true">صحافة و إعلام</keyword>
<keyword literal="true">مطلوبا</keyword>
</keyword_list>
</keyword_lists>
84
test/script/Emailer.py
Normal file
@ -0,0 +1,84 @@
import smtplib
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.MIMEBase import MIMEBase
from email import Encoders
import urllib2
import xml
from time import localtime, strftime
from xml.dom.minidom import parse, parseString
import subprocess
import sys
import os

def send_email(parsed, errorem, attachl, passFail):
element = parsed.getElementsByTagName("email")
if(len(element)<=0):
return
element = element[0]
toval = element.getAttribute("value").encode().decode("utf_8")
if(toval==None):
return
element = parsed.getElementsByTagName("mail_server")[0]
serverval = element.getAttribute("value").encode().decode("utf_8")
# Create the container (outer) email message.
msg = MIMEMultipart()
if(passFail):
msg['Subject'] = '[Test]Autopsy test passed.'
else:
msg['Subject'] = '[Test]Autopsy test failed.'
# me == the sender's email address
# family = the list of all recipients' email addresses
msg['From'] = 'AutopsyTest'
msg['To'] = toval
msg.preamble = 'This is a test'
container = MIMEText(errorem, 'plain')
msg.attach(container)
Build_email(msg, attachl)
s = smtplib.SMTP(serverval)
s.sendmail(msg['From'], msg['To'], msg.as_string())
s.quit()

def Build_email(msg, attachl):
for file in attachl:
part = MIMEBase('application', "octet-stream")
atach = open(file, "rb")
attch = atach.read()
noml = file.split("\\")
nom = noml[len(noml)-1]
part.set_payload(attch)
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="' + nom + '"')
msg.attach(part)

# Returns a Windows style path starting with the cwd and
# ending with the list of directories given
def make_local_path(*dirs):
path = wgetcwd()
for dir in dirs:
path += ("\\" + dir)
return path_fix(path)

# Returns a Windows style path based only off the given directories
def make_path(*dirs):
path = dirs[0]
for dir in dirs[1:]:
path += ("\\" + dir)
return path_fix(path)

# Fix a standard os.path by making it Windows format
def path_fix(path):
return path.replace("/", "\\")

# Gets the true current working directory instead of Cygwin's
def wgetcwd():
proc = subprocess.Popen(("cygpath", "-m", os.getcwd()), stdout=subprocess.PIPE)
out,err = proc.communicate()
tst = out.rstrip()
# Call os.getcwd(); comparing the bare function object to the cygpath output would never match.
if os.getcwd() == tst:
return os.getcwd()
else:
proc = subprocess.Popen(("cygpath", "-m", os.getcwd()), stdout=subprocess.PIPE)
out,err = proc.communicate()
return out.rstrip()
186
test/script/srcupdater.py
Normal file
@ -0,0 +1,186 @@
import codecs
import datetime
import logging
import os
import re
import shutil
import socket
import sqlite3
import subprocess
import sys
from sys import platform as _platform
import time
import traceback
import xml
from xml.dom.minidom import parse, parseString
import Emailer

def compile(errore, attachli, parsedin):
global redo
global tryredo
global failedbool
global errorem
errorem = errore
global attachl
attachl = attachli
global passed
global parsed
parsed = parsedin
passed = True
tryredo = False
redo = True
while(redo):
passed = True
if(passed):
gitPull("sleuthkit")
if(passed):
vsBuild()
if(passed):
gitPull("autopsy")
if(passed):
antBuild("datamodel", False)
if(passed):
antBuild("autopsy", True)
if(passed):
redo = False
else:
print("Compile Failed")
time.sleep(3600)
attachl = []
errorem = "The test standard didn't match the gold standard.\n"
failedbool = False
if(tryredo):
errorem += "Rebuilt properly.\n"
Emailer.send_email(parsed, errorem, attachl, True)
attachl = []
errorem = "The test standard didn't match the gold standard.\n"
passed = True

#Pulls from git
def gitPull(TskOrAutopsy):
global SYS
global errorem
global attachl
ccwd = ""
gppth = Emailer.make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt")
attachl.append(gppth)
gpout = open(gppth, 'a')
toPull = "http://www.github.com/sleuthkit/" + TskOrAutopsy
call = ["git", "pull", toPull]
if TskOrAutopsy == "sleuthkit":
ccwd = os.path.join("..", "..", "..", "sleuthkit")
else:
ccwd = os.path.join("..", "..")
subprocess.call(call, stdout=gpout, cwd=ccwd)
gpout.close()


#Builds TSK as a win32 application
def vsBuild():
global redo
global tryredo
global passed
global parsed
#Please ensure that the current working directory is $autopsy/testing/script
oldpath = os.getcwd()
os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32"))
vs = []
vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe")
vs.append(os.path.join("Tsk-win.sln"))
vs.append("/p:configuration=release")
vs.append("/p:platform=win32")
vs.append("/t:clean")
vs.append("/t:rebuild")
print(vs)
VSpth = Emailer.make_local_path("..", "VSOutput.txt")
VSout = open(VSpth, 'a')
subprocess.call(vs, stdout=VSout)
VSout.close()
os.chdir(oldpath)
chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll")
try:
open(chk)
except IOError as e:
global errorem
global attachl
if(not tryredo):
errorem += "LIBTSK C++ failed to build.\n"
attachl.append(VSpth)
Emailer.send_email(parsed, errorem, attachl, False)
tryredo = True
passed = False
redo = True



#Builds Autopsy or the Datamodel
def antBuild(which, Build):
global redo
global passed
global tryredo
global parsed
directory = os.path.join("..", "..")
ant = []
if which == "datamodel":
directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java")
ant.append("ant")
ant.append("-f")
ant.append(directory)
ant.append("clean")
if(Build):
ant.append("build")
else:
ant.append("dist")
antpth = Emailer.make_local_path("..", "ant" + which + "Output.txt")
antout = open(antpth, 'a')
succd = subprocess.call(ant, stdout=antout)
antout.close()
global errorem
global attachl
if which == "datamodel":
chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar")
try:
open(chk)
except IOError as e:
if(not tryredo):
errorem += "DataModel Java build failed.\n"
attachl.append(antpth)
Emailer.send_email(parsed, errorem, attachl, False)
passed = False
tryredo = True
elif (succd != 0 and (not tryredo)):
errorem += "Autopsy build failed.\n"
attachl.append(antpth)
Emailer.send_email(parsed, errorem, attachl, False)
tryredo = True
elif (succd != 0):
passed = False


def main():
errore = ""
attachli = []
config_file = ""
arg = sys.argv.pop(0)
arg = sys.argv.pop(0)
config_file = arg
parsedin = parse(config_file)
compile(errore, attachli, parsedin)

class OS:
LINUX, MAC, WIN, CYGWIN = range(4)
if __name__ == "__main__":
global SYS
if _platform == "linux" or _platform == "linux2":
SYS = OS.LINUX
elif _platform == "darwin":
SYS = OS.MAC
elif _platform == "win32":
SYS = OS.WIN
elif _platform == "cygwin":
SYS = OS.CYGWIN

if SYS is OS.WIN or SYS is OS.CYGWIN:
main()
else:
print("We only support Windows and Cygwin at this time.")
BIN
thirdparty/jfxrt/1.7.21/jfxrt.jar
vendored
Normal file
Binary file not shown.