Merge pull request #331 from jawallace/testing

Testing
Richard Cordovano 2013-12-19 09:01:07 -08:00
commit 261fb48f27
5 changed files with 231 additions and 194 deletions

View File

@@ -48,7 +48,9 @@ import org.netbeans.jellytools.MainWindowOperator;
 import org.netbeans.jellytools.NbDialogOperator;
 import org.netbeans.jellytools.WizardOperator;
 import org.netbeans.jemmy.Timeout;
+import org.netbeans.jemmy.Timeouts;
 import org.netbeans.jemmy.operators.JButtonOperator;
+import org.netbeans.jemmy.operators.JListOperator;
 import org.netbeans.jemmy.operators.JCheckBoxOperator;
 import org.netbeans.jemmy.operators.JDialogOperator;
 import org.netbeans.jemmy.operators.JFileChooserOperator;
@@ -116,6 +118,7 @@ public class RegressionTest extends TestCase {
     public void setUp() {
         logger.info("######## " + System.getProperty("img_path") + " #######");
+        Timeouts.setDefault("ComponentOperator.WaitComponentTimeout", 1000000);
     }
     /**
@@ -232,7 +235,7 @@ public class RegressionTest extends TestCase {
         jfco0.chooseFile(words);
         JTableOperator jto = new JTableOperator(jdo, 0);
         jto.clickOnCell(0, 0);
-        JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Enable for ingest", 0);
+        JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Use during ingest", 0);
         if (!jcbo.isSelected()) {
             jcbo.doClick();
         }
@@ -299,10 +302,13 @@ public class RegressionTest extends TestCase {
         logger.info("Generate Report Button");
         JDialog reportDialog = JDialogOperator.waitJDialog("Generate Report", false, false);
         JDialogOperator reportDialogOperator = new JDialogOperator(reportDialog);
+        JListOperator listOperator = new JListOperator(reportDialogOperator);
         JButtonOperator jbo0 = new JButtonOperator(reportDialogOperator, "Next");
         DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss");
         Date date = new Date();
         String datenotime = dateFormat.format(date);
+        listOperator.clickOnItem(2, 1);
+        new Timeout("pausing", 1000).sleep();
         jbo0.pushNoBlock();
         new Timeout("pausing", 1000).sleep();
         JButtonOperator jbo1 = new JButtonOperator(reportDialogOperator, "Finish");

View File

@@ -221,6 +221,9 @@ class TestRunner(object):
         logres = Logs.search_common_log("TskCoreException", test_data)
         TestResultsDiffer.run_diff(test_data)
+        print("Html report passed: ", test_data.html_report_passed)
+        print("Errors diff passed: ", test_data.errors_diff_passed)
+        print("DB diff passed: ", test_data.db_diff_passed)
         test_data.overall_passed = (test_data.html_report_passed and
                                     test_data.errors_diff_passed and test_data.db_diff_passed)
@@ -736,7 +739,7 @@ class TestResultsDiffer(object):
         output_dir = test_data.output_path
         gold_bb_dump = test_data.get_sorted_data_path(DBType.GOLD)
         gold_dump = test_data.get_db_dump_path(DBType.GOLD)
-        test_data.db_diff_pass = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump,
+        test_data.db_diff_passed = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump,
                                        gold_dump=gold_dump).run_diff())
         # Compare Exceptions
@@ -1656,7 +1659,7 @@ class Args(object):
            elif arg == "-fr" or arg == "--forcerun":
                print("Not downloading new images")
                self.fr = True
-           elif arg == "-e" or arg == "-email":
+           elif arg == "--email":
                self.email_enabled = True
            else:
                print(usage())
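A note on the all(...) wrapper in the TestResultsDiffer hunk: TskDbDiff.run_diff() returns a pair of booleans (the non-blackboard and blackboard comparisons, as the dump_passed, bb_dump_passed = db_diff.run_diff() line later in this diff shows), so all() collapses the pair into the single db_diff_passed flag. A minimal sketch of the pattern, using a hypothetical stand-in for TskDbDiff:

# FakeDbDiff is a hypothetical stand-in for TskDbDiff, which compares an
# Autopsy output database against a gold standard database.
class FakeDbDiff:
    def run_diff(self):
        # (non-blackboard dump passed, blackboard dump passed)
        return (True, False)

db_diff_passed = all(FakeDbDiff().run_diff())
print("DB diff passed: ", db_diff_passed)  # False: both parts must pass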

View File

@@ -21,7 +21,7 @@ def make_path(*dirs):
 # Fix a standard os.path by making it Windows format
 def path_fix(path):
-    return path.replace("/", "\\")
+    return os.path.normcase(os.path.normpath(path))
 # Gets the true current working directory instead of Cygwin's
 def wgetcwd():
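The path_fix change above replaces a blanket slash substitution with the standard-library normalizers. A minimal sketch of the difference on a made-up path (the behavior described is for Windows, the only platform these scripts support; os.path.normcase is a no-op elsewhere):

import os.path

p = "C:/Work/./Autopsy/../gold_dbs"

# Old behavior: only swaps separators, keeps "." / ".." segments and case.
old = p.replace("/", "\\")                   # C:\Work\.\Autopsy\..\gold_dbs

# New behavior on Windows: collapses "." and "..", normalizes separators,
# and lowercases, so equivalent paths compare equal as plain strings.
new = os.path.normcase(os.path.normpath(p))  # c:\work\gold_dbs

print(old)
print(new)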

View File

@@ -17,6 +17,10 @@ import Emailer
 from regression_utils import *
 def compile(errore, attachli, parsedin):
+    global to
+    global server
+    global subj
+    global email_enabled
     global redo
     global tryredo
     global failedbool
@@ -36,12 +40,15 @@ def compile(errore, attachli, parsedin):
         gitPull("sleuthkit")
     if(passed):
         vsBuild()
+        print("TSK")
     if(passed):
         gitPull("autopsy")
     if(passed):
         antBuild("datamodel", False)
+        print("DataModel")
     if(passed):
         antBuild("autopsy", True)
+        print("Aut")
     if(passed):
         redo = False
     else:
@@ -53,7 +60,8 @@ def compile(errore, attachli, parsedin):
     if(tryredo):
         errorem = ""
         errorem += "Rebuilt properly.\n"
-        Emailer.send_email(parsed, errorem, attachl, True)
+        if email_enabled:
+            Emailer.send_email(to, server, subj, errorem, attachl)
         attachl = []
         passed = True
@@ -75,7 +83,6 @@ def gitPull(TskOrAutopsy):
     subprocess.call(call, stdout=sys.stdout, cwd=ccwd)
     gpout.close()
 #Builds TSK as a win32 applicatiion
 def vsBuild():
     global redo
@@ -89,7 +96,7 @@ def vsBuild():
     vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe")
     vs.append(os.path.join("Tsk-win.sln"))
     vs.append("/p:configuration=release")
-    vs.append("/p:platform=win32")
+    vs.append("/p:platform=x64")
     vs.append("/t:clean")
     vs.append("/t:rebuild")
     print(vs)
@@ -98,24 +105,24 @@ def vsBuild():
     subprocess.call(vs, stdout=VSout)
     VSout.close()
     os.chdir(oldpath)
-    chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll")
-    try:
-        open(chk)
-    except IOError as e:
+    chk = os.path.join("..", "..", "..","sleuthkit", "win32", "x64", "Release", "libtsk_jni.dll")
+    if not os.path.exists(chk):
+        print("path doesn't exist")
         global errorem
         global attachl
+        global email_enabled
        if(not tryredo):
            errorem += "LIBTSK C++ failed to build.\n"
            attachl.append(VSpth)
-           send_email(parsed, errorem, attachl, False)
+           if email_enabled:
+               Emailer.send_email(parsed, errorem, attachl, False)
            tryredo = True
        passed = False
        redo = True
 #Builds Autopsy or the Datamodel
 def antBuild(which, Build):
+    print("building: ", which)
     global redo
     global passed
     global tryredo
@@ -138,6 +145,10 @@ def antBuild(which, Build):
     antout.close()
     global errorem
     global attachl
+    global email_enabled
+    global to
+    global subj
+    global server
     if which == "datamodel":
         chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar")
         try:
@@ -146,19 +157,24 @@ def antBuild(which, Build):
            if(not tryredo):
                errorem += "DataModel Java build failed.\n"
                attachl.append(antpth)
-               Emailer.send_email(parsed, errorem, attachl, False)
+               if email_enabled:
+                   Emailer.send_email(to, server, subj, errorem, attachl)
            passed = False
            tryredo = True
     elif (succd != 0 and (not tryredo)):
         errorem += "Autopsy build failed.\n"
         attachl.append(antpth)
-        Emailer.send_email(parsed, errorem, attachl, False)
+        Emailer.send_email(to, server, subj, errorem, attachl)
         tryredo = True
     elif (succd != 0):
         passed = False
 def main():
+    global email_enabled
+    global to
+    global server
+    global subj
     errore = ""
     attachli = []
     config_file = ""
@@ -166,6 +182,14 @@ def main():
         arg = sys.argv.pop(0)
         config_file = arg
     parsedin = parse(config_file)
+    try:
+        to = parsedin.getElementsByTagName("email")[0].getAttribute("value").encode().decode("utf_8")
+        server = parsedin.getElementsByTagName("mail_server")[0].getAttribute("value").encode().decode("utf_8")
+        subj = parsedin.getElementsByTagName("subject")[0].getAttribute("value").encode().decode("utf_8")
+    except Exception:
+        email_enabled = False
+    # email_enabled = (to is not None) and (server is not None) and (subj is not None)
+    email_enabled = False
     compile(errore, attachli, parsedin)
 class OS:
@@ -185,3 +209,4 @@ if __name__ == "__main__":
     main()
 else:
     print("We only support Windows and Cygwin at this time.")

View File

@@ -136,37 +136,40 @@ class TskDbDiff(object):
         """
         unsorted_dump = TskDbDiff._get_tmp_file("dump_data", ".txt")
         conn = sqlite3.connect(db_file)
-        autopsy_cur2 = conn.cursor()
+        conn.text_factory = lambda x: x.decode("utf-8", "ignore")
+        conn.row_factory = sqlite3.Row
+        artifact_cursor = conn.cursor()
         # Get the list of all artifacts
         # @@@ Could add a SORT by parent_path in here since that is how we are going to later sort it.
-        autopsy_cur2.execute("SELECT tsk_files.parent_path, tsk_files.name, blackboard_artifact_types.display_name, blackboard_artifacts.artifact_id FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id")
+        artifact_cursor.execute("SELECT tsk_files.parent_path, tsk_files.name, blackboard_artifact_types.display_name, blackboard_artifacts.artifact_id FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id")
         database_log = codecs.open(unsorted_dump, "wb", "utf_8")
-        rw = autopsy_cur2.fetchone()
+        row = artifact_cursor.fetchone()
         appnd = False
         counter = 0
         artifact_count = 0
         artifact_fail = 0
         # Cycle through artifacts
         try:
-            while (rw != None):
+            while (row != None):
                 # File Name and artifact type
-                if(rw[0] != None):
-                    database_log.write(rw[0] + rw[1] + ' <artifact type="' + rw[2] + '" > ')
+                if(row["parent_path"] != None):
+                    database_log.write(row["parent_path"] + row["name"] + ' <artifact type="' + row["display_name"] + '" > ')
                 else:
-                    database_log.write(rw[1] + ' <artifact type="' + rw[2] + '" > ')
+                    database_log.write(row["name"] + ' <artifact type="' + row["display_name"] + '" > ')
                 # Get attributes for this artifact
-                autopsy_cur1 = conn.cursor()
+                attribute_cursor = conn.cursor()
                 looptry = True
                 artifact_count += 1
                 try:
-                    key = ""
-                    key = str(rw[3])
-                    key = key,
-                    autopsy_cur1.execute("SELECT blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double FROM blackboard_attributes INNER JOIN blackboard_attribute_types ON blackboard_attributes.attribute_type_id = blackboard_attribute_types.attribute_type_id WHERE artifact_id =? ORDER BY blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double", key)
-                    attributes = autopsy_cur1.fetchall()
+                    art_id = ""
+                    art_id = str(row["artifact_id"])
+                    attribute_cursor.execute("SELECT blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double FROM blackboard_attributes INNER JOIN blackboard_attribute_types ON blackboard_attributes.attribute_type_id = blackboard_attribute_types.attribute_type_id WHERE artifact_id =? ORDER BY blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double", [art_id])
+                    attributes = attribute_cursor.fetchall()
                 except sqlite3.Error as e:
-                    msg ="Attributes in artifact id (in output DB)# " + str(rw[3]) + " encountered an error: " + str(e) +" .\n"
+                    msg = "Attributes in artifact id (in output DB)# " + str(row["artifact_id"]) + " encountered an error: " + str(e) +" .\n"
+                    print("Attributes in artifact id (in output DB)# ", str(row["artifact_id"]), " encountered an error: ", str(e))
+                    print()
                     looptry = False
                     artifact_fail += 1
                     database_log.write('Error Extracting Attributes')
@@ -177,33 +180,29 @@ class TskDbDiff(object):
                 if(looptry == True):
                     src = attributes[0][0]
                     for attr in attributes:
-                        val = 3 + attr[2]
+                        attr_value_index = 3 + attr["value_type"]
                         numvals = 0
                         for x in range(3, 6):
                             if(attr[x] != None):
                                 numvals += 1
                         if(numvals > 1):
-                            msg = "There were too many values for attribute type: " + attr[1] + " for artifact with id #" + str(rw[3]) + ".\n"
-                        if(not attr[0] == src):
-                            msg ="There were inconsistent sources for artifact with id #" + str(rw[3]) + ".\n"
+                            msg = "There were too many values for attribute type: " + attr["display_name"] + " for artifact with id #" + str(row["artifact_id"]) + ".\n"
+                        if(not attr["source"] == src):
+                            msg = "There were inconsistent sources for artifact with id #" + str(row["artifact_id"]) + ".\n"
                         try:
-                            database_log.write('<attribute source="' + attr[0] + '" type="' + attr[1] + '" value="')
-                            inpval = attr[val]
-                            if((type(inpval) != 'unicode') or (type(inpval) != 'str')):
-                                inpval = str(inpval)
-                            patrn = re.compile("[\n\0\a\b\r\f\e]")
-                            inpval = re.sub(patrn, ' ', inpval)
-                            database_log.write(inpval)
+                            attr_value_as_string = str(attr[attr_value_index])
+                            #if((type(attr_value_as_string) != 'unicode') or (type(attr_value_as_string) != 'str')):
+                            # attr_value_as_string = str(attr_value_as_string)
+                            patrn = re.compile("[\n\0\a\b\r\f]")
+                            attr_value_as_string = re.sub(patrn, ' ', attr_value_as_string)
+                            database_log.write('<attribute source="' + attr["source"] + '" type="' + attr["display_name"] + '" value="' + attr_value_as_string + '" />')
                         except IOError as e:
+                            print("IO error")
                             raise TskDbDiffException("Unexpected IO error while writing to database log." + str(e))
-                        database_log.write('" />')
                 database_log.write(' <artifact/>\n')
-                rw = autopsy_cur2.fetchone()
+                row = artifact_cursor.fetchone()
-            # Now sort the file
-            srtcmdlst = ["sort", unsorted_dump, "-o", bb_dump_file]
-            subprocess.call(srtcmdlst)
             print(artifact_fail)
             if(artifact_fail > 0):
                 msg ="There were " + str(artifact_count) + " artifacts and " + str(artifact_fail) + " threw an exception while loading.\n"
@@ -212,6 +211,10 @@ class TskDbDiff(object):
         finally:
             database_log.close()
+        # Now sort the file
+        srtcmdlst = ["sort", unsorted_dump, "-o", bb_dump_file]
+        subprocess.call(srtcmdlst)
     def _dump_output_db_nonbb(db_file, dump_file):
         """Dumps a database to a text file.
@@ -224,7 +227,7 @@ class TskDbDiff(object):
         backup_db_file = TskDbDiff._get_tmp_file("tsk_backup_db", ".db")
         shutil.copy(db_file, backup_db_file)
         conn = sqlite3.connect(backup_db_file)
+        conn.text_factory = lambda x: x.decode("utf-8", "ignore")
         # Delete the blackboard tables
         conn.execute("DROP TABLE blackboard_artifacts")
         conn.execute("DROP TABLE blackboard_attributes")
@@ -266,14 +269,14 @@ def main():
         print("usage: tskdbdiff [OUPUT DB PATH] [GOLD DB PATH]")
         sys.exit()
-    db_diff = TskDbDiff(output_db, gold_db)
+    db_diff = TskDbDiff(output_db, gold_db, output_dir=".")
     dump_passed, bb_dump_passed = db_diff.run_diff()
     if dump_passed and bb_dump_passed:
         print("Database comparison passed.")
-    elif not dump_passed:
+    if not dump_passed:
         print("Non blackboard database comparison failed.")
-    elif not bb_dump_passed:
+    if not bb_dump_passed:
         print("Blackboard database comparison failed.")
     return 0
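The blackboard dump above moves from positional tuple indexing (rw[0], rw[3]) to name-based access by installing sqlite3.Row as the connection's row factory, and sets a lenient text_factory so TEXT values containing invalid UTF-8 bytes are decoded with errors ignored instead of aborting the dump. A minimal standalone sketch of both settings, using a throwaway in-memory table whose column names are borrowed from the queries above:

import sqlite3

conn = sqlite3.connect(":memory:")
# Decode TEXT columns leniently instead of raising on bad UTF-8 bytes.
conn.text_factory = lambda x: x.decode("utf-8", "ignore")
# Return rows that support both positional and name-based access.
conn.row_factory = sqlite3.Row

conn.execute("CREATE TABLE tsk_files (obj_id INTEGER, parent_path TEXT, name TEXT)")
conn.execute("INSERT INTO tsk_files VALUES (1, '/img_1/', 'readme.txt')")

cursor = conn.cursor()
cursor.execute("SELECT parent_path, name FROM tsk_files WHERE obj_id = ?", [1])
row = cursor.fetchone()
print(row["parent_path"] + row["name"])  # same value as row[0] + row[1]

The same text_factory line is also added to _dump_output_db_nonbb above, so the non-blackboard dump tolerates the same malformed text.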