Merge pull request #331 from jawallace/testing

Testing
Richard Cordovano 2013-12-19 09:01:07 -08:00
commit 261fb48f27
5 changed files with 231 additions and 194 deletions

View File

@@ -48,7 +48,9 @@ import org.netbeans.jellytools.MainWindowOperator;
 import org.netbeans.jellytools.NbDialogOperator;
 import org.netbeans.jellytools.WizardOperator;
 import org.netbeans.jemmy.Timeout;
+import org.netbeans.jemmy.Timeouts;
 import org.netbeans.jemmy.operators.JButtonOperator;
+import org.netbeans.jemmy.operators.JListOperator;
 import org.netbeans.jemmy.operators.JCheckBoxOperator;
 import org.netbeans.jemmy.operators.JDialogOperator;
 import org.netbeans.jemmy.operators.JFileChooserOperator;
@@ -116,6 +118,7 @@ public class RegressionTest extends TestCase {
     public void setUp() {
         logger.info("######## " + System.getProperty("img_path") + " #######");
+        Timeouts.setDefault("ComponentOperator.WaitComponentTimeout", 1000000);
     }
     /**
@@ -232,7 +235,7 @@ public class RegressionTest extends TestCase {
         jfco0.chooseFile(words);
         JTableOperator jto = new JTableOperator(jdo, 0);
         jto.clickOnCell(0, 0);
-        JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Enable for ingest", 0);
+        JCheckBoxOperator jcbo = new JCheckBoxOperator(jdo, "Use during ingest", 0);
         if (!jcbo.isSelected()) {
             jcbo.doClick();
         }
@@ -299,10 +302,13 @@ public class RegressionTest extends TestCase {
         logger.info("Generate Report Button");
         JDialog reportDialog = JDialogOperator.waitJDialog("Generate Report", false, false);
         JDialogOperator reportDialogOperator = new JDialogOperator(reportDialog);
+        JListOperator listOperator = new JListOperator(reportDialogOperator);
         JButtonOperator jbo0 = new JButtonOperator(reportDialogOperator, "Next");
         DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss");
         Date date = new Date();
         String datenotime = dateFormat.format(date);
+        listOperator.clickOnItem(2, 1);
+        new Timeout("pausing", 1000).sleep();
         jbo0.pushNoBlock();
         new Timeout("pausing", 1000).sleep();
         JButtonOperator jbo1 = new JButtonOperator(reportDialogOperator, "Finish");

View File

@@ -165,9 +165,9 @@ class TestRunner(object):
             Errors.add_email_attachment(html.name)
             html.close()
         if test_config.email_enabled:
             Emailer.send_email(test_config.mail_to, test_config.mail_server,
                                test_config.mail_subject, Errors.email_body, Errors.email_attachs)
     def _run_autopsy_ingest(test_data):
         """Run Autopsy ingest for the image in the given TestData.
@@ -221,6 +221,9 @@ class TestRunner(object):
         logres = Logs.search_common_log("TskCoreException", test_data)
         TestResultsDiffer.run_diff(test_data)
+        print("Html report passed: ", test_data.html_report_passed)
+        print("Errors diff passed: ", test_data.errors_diff_passed)
+        print("DB diff passed: ", test_data.db_diff_passed)
         test_data.overall_passed = (test_data.html_report_passed and
                                     test_data.errors_diff_passed and test_data.db_diff_passed)
@@ -736,7 +739,7 @@ class TestResultsDiffer(object):
         output_dir = test_data.output_path
         gold_bb_dump = test_data.get_sorted_data_path(DBType.GOLD)
         gold_dump = test_data.get_db_dump_path(DBType.GOLD)
-        test_data.db_diff_pass = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump,
+        test_data.db_diff_passed = all(TskDbDiff(output_db, gold_db, output_dir=output_dir, gold_bb_dump=gold_bb_dump,
                                      gold_dump=gold_dump).run_diff())
         # Compare Exceptions
@@ -1656,7 +1659,7 @@ class Args(object):
             elif arg == "-fr" or arg == "--forcerun":
                 print("Not downloading new images")
                 self.fr = True
-            elif arg == "-e" or arg == "-email":
+            elif arg == "--email":
                 self.email_enabled = True
             else:
                 print(usage())
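
Note on the `db_diff_pass` to `db_diff_passed` rename above: the differ was writing its result to one attribute while the overall pass/fail check read another, so the reported outcome could not reflect the actual database comparison. A minimal sketch of that mismatch, using a hypothetical stand-in for regression.py's `TestData` with assumed defaults:

```python
class TestData:
    """Hypothetical stand-in for regression.py's TestData; defaults assumed."""
    html_report_passed = True
    errors_diff_passed = True
    db_diff_passed = False        # stays at its default if nothing writes it

test_data = TestData()
test_data.db_diff_pass = True     # old code: result lands on the wrong attribute
overall = (test_data.html_report_passed and
           test_data.errors_diff_passed and
           test_data.db_diff_passed)
print(overall)                    # False, regardless of the real DB diff result
```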

View File

@@ -21,7 +21,7 @@ def make_path(*dirs):
 # Fix a standard os.path by making it Windows format
 def path_fix(path):
-    return path.replace("/", "\\")
+    return os.path.normcase(os.path.normpath(path))
 # Gets the true current working directory instead of Cygwin's
 def wgetcwd():
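
The `path_fix` change above replaces a blind slash swap with the standard library's path normalization. A short sketch of the behavioral difference, assuming it runs on Windows (the example paths are made up):

```python
import os

def path_fix_old(path):
    # old behavior: only rewrites separators, keeps "." and ".." segments
    return path.replace("/", "\\")

def path_fix_new(path):
    # new behavior: normpath collapses "." / ".." and redundant separators,
    # then normcase lowercases and fixes separators on Windows
    return os.path.normcase(os.path.normpath(path))

# On Windows:
#   path_fix_old("C:/work/./output/../gold") -> "C:\\work\\.\\output\\..\\gold"
#   path_fix_new("C:/work/./output/../gold") -> "c:\\work\\gold"
```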

View File

@@ -17,171 +17,196 @@ import Emailer
 from regression_utils import *

 def compile(errore, attachli, parsedin):
-    global redo
-    global tryredo
-    global failedbool
-    global errorem
-    errorem = errore
-    global attachl
-    attachl = attachli
-    global passed
-    global parsed
-    parsed = parsedin
-    passed = True
-    tryredo = False
-    redo = True
-    while(redo):
-        passed = True
-        if(passed):
-            gitPull("sleuthkit")
-        if(passed):
-            vsBuild()
-        if(passed):
-            gitPull("autopsy")
-        if(passed):
-            antBuild("datamodel", False)
-        if(passed):
-            antBuild("autopsy", True)
-        if(passed):
-            redo = False
-        else:
-            print("Compile Failed")
-            time.sleep(3600)
-    attachl = []
-    errorem = "The test standard didn't match the gold standard.\n"
-    failedbool = False
-    if(tryredo):
-        errorem = ""
-        errorem += "Rebuilt properly.\n"
-        Emailer.send_email(parsed, errorem, attachl, True)
-        attachl = []
-        passed = True
+    global to
+    global server
+    global subj
+    global email_enabled
+    global redo
+    global tryredo
+    global failedbool
+    global errorem
+    errorem = errore
+    global attachl
+    attachl = attachli
+    global passed
+    global parsed
+    parsed = parsedin
+    passed = True
+    tryredo = False
+    redo = True
+    while(redo):
+        passed = True
+        if(passed):
+            gitPull("sleuthkit")
+        if(passed):
+            vsBuild()
+            print("TSK")
+        if(passed):
+            gitPull("autopsy")
+        if(passed):
+            antBuild("datamodel", False)
+            print("DataModel")
+        if(passed):
+            antBuild("autopsy", True)
+            print("Aut")
+        if(passed):
+            redo = False
+        else:
+            print("Compile Failed")
+            time.sleep(3600)
+    attachl = []
+    errorem = "The test standard didn't match the gold standard.\n"
+    failedbool = False
+    if(tryredo):
+        errorem = ""
+        errorem += "Rebuilt properly.\n"
+        if email_enabled:
+            Emailer.send_email(to, server, subj, errorem, attachl)
+        attachl = []
+        passed = True

 #Pulls from git
 def gitPull(TskOrAutopsy):
     global SYS
     global errorem
     global attachl
     ccwd = ""
     gppth = make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt")
     attachl.append(gppth)
     gpout = open(gppth, 'a')
     toPull = "https://www.github.com/sleuthkit/" + TskOrAutopsy
     call = ["git", "pull", toPull]
     if TskOrAutopsy == "sleuthkit":
         ccwd = os.path.join("..", "..", "..", "sleuthkit")
     else:
         ccwd = os.path.join("..", "..")
     subprocess.call(call, stdout=sys.stdout, cwd=ccwd)
     gpout.close()

 #Builds TSK as a win32 applicatiion
 def vsBuild():
     global redo
     global tryredo
     global passed
     global parsed
     #Please ensure that the current working directory is $autopsy/testing/script
     oldpath = os.getcwd()
     os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32"))
     vs = []
     vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe")
     vs.append(os.path.join("Tsk-win.sln"))
     vs.append("/p:configuration=release")
-    vs.append("/p:platform=win32")
+    vs.append("/p:platform=x64")
     vs.append("/t:clean")
     vs.append("/t:rebuild")
     print(vs)
     VSpth = make_local_path("..", "VSOutput.txt")
     VSout = open(VSpth, 'a')
     subprocess.call(vs, stdout=VSout)
     VSout.close()
     os.chdir(oldpath)
-    chk = os.path.join("..", "..", "..","sleuthkit", "win32", "Release", "libtsk_jni.dll")
-    try:
-        open(chk)
-    except IOError as e:
-        global errorem
-        global attachl
-        if(not tryredo):
-            errorem += "LIBTSK C++ failed to build.\n"
-            attachl.append(VSpth)
-            send_email(parsed, errorem, attachl, False)
-        tryredo = True
-        passed = False
-        redo = True
+    chk = os.path.join("..", "..", "..","sleuthkit", "win32", "x64", "Release", "libtsk_jni.dll")
+    if not os.path.exists(chk):
+        print("path doesn't exist")
+        global errorem
+        global attachl
+        global email_enabled
+        if(not tryredo):
+            errorem += "LIBTSK C++ failed to build.\n"
+            attachl.append(VSpth)
+            if email_enabled:
+                Emailer.send_email(parsed, errorem, attachl, False)
+        tryredo = True
+        passed = False
+        redo = True

 #Builds Autopsy or the Datamodel
 def antBuild(which, Build):
-    global redo
-    global passed
-    global tryredo
-    global parsed
-    directory = os.path.join("..", "..")
-    ant = []
-    if which == "datamodel":
-        directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java")
-    ant.append("ant")
-    ant.append("-f")
-    ant.append(directory)
-    ant.append("clean")
-    if(Build):
-        ant.append("build")
-    else:
-        ant.append("dist")
-    antpth = make_local_path("..", "ant" + which + "Output.txt")
-    antout = open(antpth, 'a')
-    succd = subprocess.call(ant, stdout=antout)
-    antout.close()
-    global errorem
-    global attachl
-    if which == "datamodel":
-        chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar")
-        try:
-            open(chk)
-        except IOError as e:
-            if(not tryredo):
-                errorem += "DataModel Java build failed.\n"
-                attachl.append(antpth)
-                Emailer.send_email(parsed, errorem, attachl, False)
-            passed = False
-            tryredo = True
-    elif (succd != 0 and (not tryredo)):
-        errorem += "Autopsy build failed.\n"
-        attachl.append(antpth)
-        Emailer.send_email(parsed, errorem, attachl, False)
-        tryredo = True
-    elif (succd != 0):
-        passed = False
+    print("building: ", which)
+    global redo
+    global passed
+    global tryredo
+    global parsed
+    directory = os.path.join("..", "..")
+    ant = []
+    if which == "datamodel":
+        directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java")
+    ant.append("ant")
+    ant.append("-f")
+    ant.append(directory)
+    ant.append("clean")
+    if(Build):
+        ant.append("build")
+    else:
+        ant.append("dist")
+    antpth = make_local_path("..", "ant" + which + "Output.txt")
+    antout = open(antpth, 'a')
+    succd = subprocess.call(ant, stdout=antout)
+    antout.close()
+    global errorem
+    global attachl
+    global email_enabled
+    global to
+    global subj
+    global server
+    if which == "datamodel":
+        chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar")
+        try:
+            open(chk)
+        except IOError as e:
+            if(not tryredo):
+                errorem += "DataModel Java build failed.\n"
+                attachl.append(antpth)
+                if email_enabled:
+                    Emailer.send_email(to, server, subj, errorem, attachl)
+            passed = False
+            tryredo = True
+    elif (succd != 0 and (not tryredo)):
+        errorem += "Autopsy build failed.\n"
+        attachl.append(antpth)
+        Emailer.send_email(to, server, subj, errorem, attachl)
+        tryredo = True
+    elif (succd != 0):
+        passed = False

 def main():
-    errore = ""
-    attachli = []
-    config_file = ""
-    arg = sys.argv.pop(0)
-    arg = sys.argv.pop(0)
-    config_file = arg
-    parsedin = parse(config_file)
-    compile(errore, attachli, parsedin)
+    global email_enabled
+    global to
+    global server
+    global subj
+    errore = ""
+    attachli = []
+    config_file = ""
+    arg = sys.argv.pop(0)
+    arg = sys.argv.pop(0)
+    config_file = arg
+    parsedin = parse(config_file)
+    try:
+        to = parsedin.getElementsByTagName("email")[0].getAttribute("value").encode().decode("utf_8")
+        server = parsedin.getElementsByTagName("mail_server")[0].getAttribute("value").encode().decode("utf_8")
+        subj = parsedin.getElementsByTagName("subject")[0].getAttribute("value").encode().decode("utf_8")
+    except Exception:
+        email_enabled = False
+    # email_enabled = (to is not None) and (server is not None) and (subj is not None)
+    email_enabled = False
+    compile(errore, attachli, parsedin)

 class OS:
     LINUX, MAC, WIN, CYGWIN = range(4)
 if __name__ == "__main__":
     global SYS
     if _platform == "linux" or _platform == "linux2":
         SYS = OS.LINUX
     elif _platform == "darwin":
         SYS = OS.MAC
     elif _platform == "win32":
         SYS = OS.WIN
     elif _platform == "cygwin":
         SYS = OS.CYGWIN
     if SYS is OS.WIN or SYS is OS.CYGWIN:
         main()
     else:
         print("We only support Windows and Cygwin at this time.")

View File

@@ -136,37 +136,40 @@ class TskDbDiff(object):
         """
         unsorted_dump = TskDbDiff._get_tmp_file("dump_data", ".txt")
         conn = sqlite3.connect(db_file)
-        autopsy_cur2 = conn.cursor()
+        conn.text_factory = lambda x: x.decode("utf-8", "ignore")
+        conn.row_factory = sqlite3.Row
+        artifact_cursor = conn.cursor()
         # Get the list of all artifacts
         # @@@ Could add a SORT by parent_path in here since that is how we are going to later sort it.
-        autopsy_cur2.execute("SELECT tsk_files.parent_path, tsk_files.name, blackboard_artifact_types.display_name, blackboard_artifacts.artifact_id FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id")
+        artifact_cursor.execute("SELECT tsk_files.parent_path, tsk_files.name, blackboard_artifact_types.display_name, blackboard_artifacts.artifact_id FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id")
         database_log = codecs.open(unsorted_dump, "wb", "utf_8")
-        rw = autopsy_cur2.fetchone()
+        row = artifact_cursor.fetchone()
         appnd = False
         counter = 0
         artifact_count = 0
         artifact_fail = 0
         # Cycle through artifacts
         try:
-            while (rw != None):
+            while (row != None):
                 # File Name and artifact type
-                if(rw[0] != None):
-                    database_log.write(rw[0] + rw[1] + ' <artifact type="' + rw[2] + '" > ')
+                if(row["parent_path"] != None):
+                    database_log.write(row["parent_path"] + row["name"] + ' <artifact type="' + row["display_name"] + '" > ')
                 else:
-                    database_log.write(rw[1] + ' <artifact type="' + rw[2] + '" > ')
+                    database_log.write(row["name"] + ' <artifact type="' + row["display_name"] + '" > ')
                 # Get attributes for this artifact
-                autopsy_cur1 = conn.cursor()
+                attribute_cursor = conn.cursor()
                 looptry = True
                 artifact_count += 1
                 try:
-                    key = ""
-                    key = str(rw[3])
-                    key = key,
-                    autopsy_cur1.execute("SELECT blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double FROM blackboard_attributes INNER JOIN blackboard_attribute_types ON blackboard_attributes.attribute_type_id = blackboard_attribute_types.attribute_type_id WHERE artifact_id =? ORDER BY blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double", key)
-                    attributes = autopsy_cur1.fetchall()
+                    art_id = ""
+                    art_id = str(row["artifact_id"])
+                    attribute_cursor.execute("SELECT blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double FROM blackboard_attributes INNER JOIN blackboard_attribute_types ON blackboard_attributes.attribute_type_id = blackboard_attribute_types.attribute_type_id WHERE artifact_id =? ORDER BY blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double", [art_id])
+                    attributes = attribute_cursor.fetchall()
                 except sqlite3.Error as e:
-                    msg ="Attributes in artifact id (in output DB)# " + str(rw[3]) + " encountered an error: " + str(e) +" .\n"
+                    msg = "Attributes in artifact id (in output DB)# " + str(row["artifact_id"]) + " encountered an error: " + str(e) +" .\n"
+                    print("Attributes in artifact id (in output DB)# ", str(row["artifact_id"]), " encountered an error: ", str(e))
+                    print()
                     looptry = False
                     artifact_fail += 1
                     database_log.write('Error Extracting Attributes')
@@ -177,33 +180,29 @@ class TskDbDiff(object):
                 if(looptry == True):
                     src = attributes[0][0]
                     for attr in attributes:
-                        val = 3 + attr[2]
+                        attr_value_index = 3 + attr["value_type"]
                         numvals = 0
                         for x in range(3, 6):
                             if(attr[x] != None):
                                 numvals += 1
                         if(numvals > 1):
-                            msg = "There were too many values for attribute type: " + attr[1] + " for artifact with id #" + str(rw[3]) + ".\n"
-                        if(not attr[0] == src):
-                            msg ="There were inconsistent sources for artifact with id #" + str(rw[3]) + ".\n"
+                            msg = "There were too many values for attribute type: " + attr["display_name"] + " for artifact with id #" + str(row["artifact_id"]) + ".\n"
+                        if(not attr["source"] == src):
+                            msg = "There were inconsistent sources for artifact with id #" + str(row["artifact_id"]) + ".\n"
                         try:
-                            database_log.write('<attribute source="' + attr[0] + '" type="' + attr[1] + '" value="')
-                            inpval = attr[val]
-                            if((type(inpval) != 'unicode') or (type(inpval) != 'str')):
-                                inpval = str(inpval)
-                            patrn = re.compile("[\n\0\a\b\r\f\e]")
-                            inpval = re.sub(patrn, ' ', inpval)
-                            database_log.write(inpval)
+                            attr_value_as_string = str(attr[attr_value_index])
+                            #if((type(attr_value_as_string) != 'unicode') or (type(attr_value_as_string) != 'str')):
+                            #    attr_value_as_string = str(attr_value_as_string)
+                            patrn = re.compile("[\n\0\a\b\r\f]")
+                            attr_value_as_string = re.sub(patrn, ' ', attr_value_as_string)
+                            database_log.write('<attribute source="' + attr["source"] + '" type="' + attr["display_name"] + '" value="' + attr_value_as_string + '" />')
                         except IOError as e:
+                            print("IO error")
                             raise TskDbDiffException("Unexpected IO error while writing to database log." + str(e))
-                        database_log.write('" />')
                 database_log.write(' <artifact/>\n')
-                rw = autopsy_cur2.fetchone()
+                row = artifact_cursor.fetchone()
-            # Now sort the file
-            srtcmdlst = ["sort", unsorted_dump, "-o", bb_dump_file]
-            subprocess.call(srtcmdlst)
             print(artifact_fail)
             if(artifact_fail > 0):
                 msg ="There were " + str(artifact_count) + " artifacts and " + str(artifact_fail) + " threw an exception while loading.\n"
@@ -212,6 +211,10 @@ class TskDbDiff(object):
         finally:
             database_log.close()
+        # Now sort the file
+        srtcmdlst = ["sort", unsorted_dump, "-o", bb_dump_file]
+        subprocess.call(srtcmdlst)

     def _dump_output_db_nonbb(db_file, dump_file):
         """Dumps a database to a text file.
@@ -224,7 +227,7 @@ class TskDbDiff(object):
         backup_db_file = TskDbDiff._get_tmp_file("tsk_backup_db", ".db")
         shutil.copy(db_file, backup_db_file)
         conn = sqlite3.connect(backup_db_file)
+        conn.text_factory = lambda x: x.decode("utf-8", "ignore")
         # Delete the blackboard tables
         conn.execute("DROP TABLE blackboard_artifacts")
         conn.execute("DROP TABLE blackboard_attributes")
@@ -266,14 +269,14 @@ def main():
         print("usage: tskdbdiff [OUPUT DB PATH] [GOLD DB PATH]")
         sys.exit()

-    db_diff = TskDbDiff(output_db, gold_db)
+    db_diff = TskDbDiff(output_db, gold_db, output_dir=".")
     dump_passed, bb_dump_passed = db_diff.run_diff()

     if dump_passed and bb_dump_passed:
         print("Database comparison passed.")
-    elif not dump_passed:
+    if not dump_passed:
         print("Non blackboard database comparison failed.")
-    elif not bb_dump_passed:
+    if not bb_dump_passed:
         print("Blackboard database comparison failed.")
     return 0
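
The core of the tskdbdiff.py change is switching the artifact dump from positional tuple indexing (rw[0], rw[3], and so on) to sqlite3.Row so columns are read by name, plus a lenient text_factory for byte strings that are not valid UTF-8. A small self-contained sketch of that pattern against a throwaway in-memory database (the table contents are invented for illustration):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.text_factory = lambda x: x.decode("utf-8", "ignore")  # tolerate bad bytes in TEXT columns
conn.row_factory = sqlite3.Row                             # rows become addressable by column name
conn.execute("CREATE TABLE tsk_files (obj_id INTEGER, parent_path TEXT, name TEXT)")
conn.execute("INSERT INTO tsk_files VALUES (1, '/img/', 'report.html')")

cur = conn.cursor()
cur.execute("SELECT parent_path, name FROM tsk_files")
row = cur.fetchone()
print(row["parent_path"] + row["name"])   # '/img/report.html'
```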