Scrub SQL, document how to use user-supplied queries safely

Karl Mortensen 2015-04-30 12:55:45 -04:00
parent 5db7a98dbf
commit b55e2e6e0b
29 changed files with 1768 additions and 1023 deletions

1
.gitignore vendored
View File

@ -65,7 +65,6 @@ genfiles.properties
!/Testing/nbproject/project.properties
*~
/netbeans-plat
/docs/doxygen/doxygen_docs
/docs/doxygen-user/user-docs
/jdiff-javadocs/*
/jdiff-logs/*

View File

@ -227,7 +227,7 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
}
return name;
}
@SuppressWarnings("deprecation")
private static String getHashSetHitsForFile(AbstractFile content) {
String strList = "";
SleuthkitCase skCase = content.getSleuthkitCase();
@ -255,7 +255,6 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
} catch (TskCoreException | SQLException ex) {
logger.log(Level.WARNING, "Error getting hashset hits: ", ex); //NON-NLS
}
return strList;
}

View File

@ -77,7 +77,7 @@ import org.sleuthkit.datamodel.TskCoreException;
//TODO add a generic query to SleuthkitCase
private String createMaxQuery(String attr) {
return "SELECT MAX(" + attr + ") from tsk_files WHERE " + attr + " < " + System.currentTimeMillis() / 1000; //NON-NLS
return "SELECT MAX(" + attr + ") FROM tsk_files WHERE " + attr + " < " + System.currentTimeMillis() / 1000; //NON-NLS
}
@SuppressWarnings("deprecation")

View File

@ -151,7 +151,6 @@ import org.sleuthkit.datamodel.TskCoreException;
List<AbstractFile> contentList = null;
try {
SleuthkitCase tskDb = currentCase.getSleuthkitCase();
//ResultSet rs = tempDb.runQuery(this.getQuery("count(*) as TotalMatches"));
contentList = tskDb.findAllFilesWhere(this.getQuery());
} catch (TskCoreException ex) {
@ -205,11 +204,11 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
private String getQuery() throws FilterValidationException {
//String query = "select " + tempQuery + " from tsk_files where 1";
//String query = "SELECT " + tempQuery + " FROM tsk_files WHERE 1";
String query = " 1";
for (FileSearchFilter f : this.getEnabledFilters()) {
query += " and (" + f.getPredicate() + ")"; //NON-NLS
query += " AND (" + f.getPredicate() + ")"; //NON-NLS
}
return query;

View File

@ -87,7 +87,7 @@ class BrowserLocationAnalyzer {
try {
resultSet = statement.executeQuery(
"Select timestamp, latitude, longitude, accuracy FROM CachedPosition;"); //NON-NLS
"SELECT timestamp, latitude, longitude, accuracy FROM CachedPosition;"); //NON-NLS
while (resultSet.next()) {
Long timestamp = Long.valueOf(resultSet.getString("timestamp")) / 1000; //NON-NLS

View File

@ -87,7 +87,7 @@ class GoogleMapLocationAnalyzer {
try {
resultSet = statement.executeQuery(
"Select time,dest_lat,dest_lng,dest_title,dest_address,source_lat,source_lng FROM destination_history;"); //NON-NLS
"SELECT time,dest_lat,dest_lng,dest_title,dest_address,source_lat,source_lng FROM destination_history;"); //NON-NLS
while (resultSet.next()) {
Long time = Long.valueOf(resultSet.getString("time")) / 1000; //NON-NLS

View File

@ -84,7 +84,7 @@ class TangoMessageAnalyzer {
try {
resultSet = statement.executeQuery(
"Select conv_id, create_time,direction,payload FROM messages ORDER BY create_time DESC;"); //NON-NLS
"SELECT conv_id, create_time,direction,payload FROM messages ORDER BY create_time DESC;"); //NON-NLS
String conv_id; // seems to wrap around the message found in payload after decoding from base-64
String direction; // 1 incoming, 2 outgoing

View File

@ -84,7 +84,7 @@ class TextMessageAnalyzer {
try {
resultSet = statement.executeQuery(
"Select address,date,read,type,subject,body FROM sms;"); //NON-NLS
"SELECT address,date,read,type,subject,body FROM sms;"); //NON-NLS
String address; // may be phone number, or other addresses

View File

@ -57,7 +57,7 @@ class ContactAnalyzer {
try {
SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
absFiles = skCase.findAllFilesWhere("name LIKE '%call_history%' "); //NON-NLS //get exact file names
if (absFiles.isEmpty()) { //asdfkjasfakljsdfhlaksdjfhasdlkjf
if (absFiles.isEmpty()) {
return;
}
for (AbstractFile AF : absFiles) {

View File

@ -87,7 +87,7 @@ class TextMessageAnalyzer {
AbstractFile f = skCase.getAbstractFileById(fId);
try {
resultSet = statement.executeQuery(
"Select address,date,type,subject,body FROM sms;"); //NON-NLS
"SELECT address,date,type,subject,body FROM sms;"); //NON-NLS
BlackboardArtifact bba;
String address; // may be phone number, or other addresses

View File

@ -67,7 +67,6 @@ import org.sleuthkit.datamodel.*;
* @param progressPanel panel to update the report's progress
*/
@Override
@SuppressWarnings("deprecation")
public void generateReport(String path, ReportProgressPanel progressPanel) {
// Start the progress bar and setup the report
progressPanel.setIndeterminate(false);
@ -85,7 +84,7 @@ import org.sleuthkit.datamodel.*;
+ " AND name != '.' AND name != '..'"; //NON-NLS
progressPanel.updateStatusLabel(NbBundle.getMessage(this.getClass(), "ReportBodyFile.progress.loading"));
List<FsContent> fs = skCase.findFilesWhere(query);
List<AbstractFile> fs = skCase.findAllFilesWhere(query);
// Check if ingest has finished
String ingestwarning = "";
@ -103,7 +102,7 @@ import org.sleuthkit.datamodel.*;
out.write(ingestwarning);
// Loop files and write info to report
int count = 0;
for (FsContent file : fs) {
for (AbstractFile file : fs) {
if (progressPanel.getStatus() == ReportStatus.CANCELED) {
break;
}
@ -164,7 +163,7 @@ import org.sleuthkit.datamodel.*;
logger.log(Level.SEVERE, errorMessage, ex);
}
}
progressPanel.complete();
progressPanel.complete(ReportStatus.COMPLETE);
} catch(TskCoreException ex) {
logger.log(Level.WARNING, "Failed to get the unique path.", ex); //NON-NLS
}

View File

@ -373,7 +373,7 @@ import org.sleuthkit.datamodel.TskData;
List<AbstractFile> absFiles;
try {
SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
absFiles = skCase.findAllFilesWhere("NOT meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()); //NON-NLS
absFiles = skCase.findAllFilesWhere("meta_type != " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()); //NON-NLS
return absFiles;
} catch (TskCoreException ex) {
MessageNotifyUtil.Notify.show(

View File

@ -358,7 +358,7 @@ public class TimeLineController {
@SuppressWarnings("deprecation")
private long getCaseLastArtifactID(final SleuthkitCase sleuthkitCase) {
long caseLastArtfId = -1;
String query = "select Max(artifact_id) as max_id from blackboard_artifacts"; // NON-NLS
String query = "SELECT MAX(artifact_id) AS max_id FROM blackboard_artifacts"; // NON-NLS
try (CaseDbQuery dbQuery = sleuthkitCase.executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();

View File

@ -274,9 +274,9 @@ public class EventDB {
try (Statement stmt = con.createStatement();
//You can't inject multiple values into one ? parameter in a prepared statement,
//so we make a new statement each time...
ResultSet rs = stmt.executeQuery("select Min(time), Max(time) from events where event_id in (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
ResultSet rs = stmt.executeQuery("SELECT MIN(time), MAX(time) FROM events WHERE event_id IN (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
while (rs.next()) {
span = new Interval(rs.getLong("Min(time)"), rs.getLong("Max(time)") + 1, DateTimeZone.UTC); // NON-NLS
span = new Interval(rs.getLong("MIN(time)"), rs.getLong("MAX(time)") + 1, DateTimeZone.UTC); // NON-NLS
}
} catch (SQLException ex) {
@ -314,7 +314,7 @@ public class EventDB {
int result = -1;
dbReadLock();
//TODO convert this to prepared statement -jm
try (ResultSet rs = con.createStatement().executeQuery("select count(*) as count from events")) { // NON-NLS
try (ResultSet rs = con.createStatement().executeQuery("SELECT count(*) AS count FROM events")) { // NON-NLS
while (rs.next()) {
result = rs.getInt("count"); // NON-NLS
break;
@ -444,7 +444,7 @@ public class EventDB {
Set<Long> resultIDs = new HashSet<>();
dbReadLock();
final String query = "select event_id from events where time >= " + startTime + " and time <" + endTime + " and " + getSQLWhere(filter); // NON-NLS
final String query = "SELECT event_id FROM events WHERE time >= " + startTime + " AND time <" + endTime + " AND " + getSQLWhere(filter); // NON-NLS
//System.out.println(query);
try (Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery(query)) {
@ -597,11 +597,11 @@ public class EventDB {
"INSERT INTO events (file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state) " // NON-NLS
+ "VALUES (?,?,?,?,?,?,?,?,?)"); // NON-NLS
getMaxTimeStmt = prepareStatement("select Max(time) as max from events"); // NON-NLS
getMinTimeStmt = prepareStatement("select Min(time) as min from events"); // NON-NLS
getEventByIDStmt = prepareStatement("select * from events where event_id = ?"); // NON-NLS
recordDBInfoStmt = prepareStatement("insert or replace into db_info (key, value) values (?, ?)"); // NON-NLS
getDBInfoStmt = prepareStatement("select value from db_info where key = ?"); // NON-NLS
getMaxTimeStmt = prepareStatement("SELECT MAX(time) AS max FROM events"); // NON-NLS
getMinTimeStmt = prepareStatement("SELECT MIN(time) AS min FROM events"); // NON-NLS
getEventByIDStmt = prepareStatement("SELECT * FROM events WHERE event_id = ?"); // NON-NLS
recordDBInfoStmt = prepareStatement("INSERT OR REPLACE INTO db_info (key, value) VALUES (?, ?)"); // NON-NLS
getDBInfoStmt = prepareStatement("SELECT value FROM db_info WHERE key = ?"); // NON-NLS
} catch (SQLException sQLException) {
LOGGER.log(Level.SEVERE, "failed to prepareStatement", sQLException); // NON-NLS
}
@ -795,8 +795,8 @@ public class EventDB {
final boolean useSubTypes = (zoomLevel == EventTypeZoomLevel.SUB_TYPE);
//get some info about the range of dates requested
final String queryString = "select count(*), " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) // NON-NLS
+ " from events where time >= " + startTime + " and time < " + endTime + " and " + getSQLWhere(filter) // NON-NLS
final String queryString = "SELECT count(*), " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) // NON-NLS
+ " FROM events WHERE time >= " + startTime + " AND time < " + endTime + " AND " + getSQLWhere(filter) // NON-NLS
+ " GROUP BY " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN); // NON-NLS
ResultSet rs = null;
@ -876,10 +876,10 @@ public class EventDB {
//get all aggregate events in this time unit
dbReadLock();
String query = "select strftime('" + strfTimeFormat + "',time , 'unixepoch'" + (TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : "") + ") as interval, group_concat(event_id) as event_ids, Min(time), Max(time), " + descriptionColumn + ", " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) // NON-NLS
+ " from events where time >= " + start + " and time < " + end + " and " + getSQLWhere(filter) // NON-NLS
+ " group by interval, " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) + " , " + descriptionColumn // NON-NLS
+ " order by Min(time)"; // NON-NLS
String query = "SELECT strftime('" + strfTimeFormat + "',time , 'unixepoch'" + (TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : "") + ") as interval, group_concat(event_id) as event_ids, Min(time), Max(time), " + descriptionColumn + ", " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) // NON-NLS
+ " FROM events where time >= " + start + " AND time < " + end + " AND " + getSQLWhere(filter) // NON-NLS
+ " GROUP BY interval, " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) + " , " + descriptionColumn // NON-NLS
+ " ORDER BY MIN(time)"; // NON-NLS
//System.out.println(query);
ResultSet rs = null;
try (Statement stmt = con.createStatement(); // scoop up requested events in groups organized by interval, type, and description

View File

@ -682,12 +682,12 @@ public final class ImageGalleryController {
try {
//grab all files with supported extension or mime types
final List<AbstractFile> files = getSleuthKitCase().findAllFilesWhere(DRAWABLE_QUERY + " or tsk_files.obj_id in (select tsk_files.obj_id from tsk_files , blackboard_artifacts, blackboard_attributes"
+ " where blackboard_artifacts.obj_id = tsk_files.obj_id"
+ " and blackboard_attributes.artifact_id = blackboard_artifacts.artifact_id"
+ " and blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO.getTypeID()
+ " and blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG.getTypeID()
+ " and blackboard_attributes.value_text in ('" + StringUtils.join(ImageGalleryModule.getSupportedMimes(), "','") + "'))");
final List<AbstractFile> files = getSleuthKitCase().findAllFilesWhere(DRAWABLE_QUERY + " OR tsk_files.obj_id IN (select tsk_files.obj_id FROM tsk_files , blackboard_artifacts, blackboard_attributes"
+ " WHERE blackboard_artifacts.obj_id = tsk_files.obj_id"
+ " AND blackboard_attributes.artifact_id = blackboard_artifacts.artifact_id"
+ " AND blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO.getTypeID()
+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG.getTypeID()
+ " AND blackboard_attributes.value_text IN ('" + StringUtils.join(ImageGalleryModule.getSupportedMimes(), "','") + "'))");
progressHandle.switchToDeterminate(files.size());
updateProgress(0.0);
@ -807,7 +807,7 @@ public final class ImageGalleryController {
for (FileSystem fs : image.getFileSystems()) {
fsObjIds.add(fs.getId());
}
fsQuery = "(fs_obj_id = " + StringUtils.join(fsObjIds, " or fs_obj_id = ") + ") ";
fsQuery = "(fs_obj_id = " + StringUtils.join(fsObjIds, " OR fs_obj_id = ") + ") ";
}
// NOTE: Logical files currently (Apr '15) have a null value for fs_obj_id in DB.
// for them, we will not specify a fs_obj_id, which means we will grab files
@ -816,7 +816,7 @@ public final class ImageGalleryController {
fsQuery = "(fs_obj_id IS NULL) ";
}
files = getSleuthKitCase().findAllFilesWhere(fsQuery + " and " + DRAWABLE_QUERY);
files = getSleuthKitCase().findAllFilesWhere(fsQuery + " AND " + DRAWABLE_QUERY);
progressHandle.switchToDeterminate(files.size());
//do in transaction

View File

@ -199,27 +199,27 @@ public class DrawableDB {
"INSERT OR IGNORE INTO drawable_files (obj_id , path, name, created_time, modified_time, make, model, analyzed) "
+ "VALUES (?,?,?,?,?,?,?,?)");
removeFileStmt = prepareStatement("delete from drawable_files where obj_id = ?");
removeFileStmt = prepareStatement("DELETE FROM drawable_files WHERE obj_id = ?");
pathGroupStmt = prepareStatement("select obj_id , analyzed from drawable_files where path = ? ", DrawableAttribute.PATH);
nameGroupStmt = prepareStatement("select obj_id , analyzed from drawable_files where name = ? ", DrawableAttribute.NAME);
created_timeGroupStmt = prepareStatement("select obj_id , analyzed from drawable_files where created_time = ? ", DrawableAttribute.CREATED_TIME);
modified_timeGroupStmt = prepareStatement("select obj_id , analyzed from drawable_files where modified_time = ? ", DrawableAttribute.MODIFIED_TIME);
makeGroupStmt = prepareStatement("select obj_id , analyzed from drawable_files where make = ? ", DrawableAttribute.MAKE);
modelGroupStmt = prepareStatement("select obj_id , analyzed from drawable_files where model = ? ", DrawableAttribute.MODEL);
analyzedGroupStmt = prepareStatement("Select obj_id , analyzed from drawable_files where analyzed = ?", DrawableAttribute.ANALYZED);
hashSetGroupStmt = prepareStatement("select drawable_files.obj_id as obj_id, analyzed from drawable_files , hash_sets , hash_set_hits where drawable_files.obj_id = hash_set_hits.obj_id and hash_sets.hash_set_id = hash_set_hits.hash_set_id and hash_sets.hash_set_name = ?", DrawableAttribute.HASHSET);
pathGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE path = ? ", DrawableAttribute.PATH);
nameGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE name = ? ", DrawableAttribute.NAME);
created_timeGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE created_time = ? ", DrawableAttribute.CREATED_TIME);
modified_timeGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE modified_time = ? ", DrawableAttribute.MODIFIED_TIME);
makeGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE make = ? ", DrawableAttribute.MAKE);
modelGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE model = ? ", DrawableAttribute.MODEL);
analyzedGroupStmt = prepareStatement("SELECT obj_id , analyzed FROM drawable_files WHERE analyzed = ?", DrawableAttribute.ANALYZED);
hashSetGroupStmt = prepareStatement("SELECT drawable_files.obj_id AS obj_id, analyzed FROM drawable_files , hash_sets , hash_set_hits WHERE drawable_files.obj_id = hash_set_hits.obj_id AND hash_sets.hash_set_id = hash_set_hits.hash_set_id AND hash_sets.hash_set_name = ?", DrawableAttribute.HASHSET);
updateGroupStmt = prepareStatement("update groups set seen = 1 where value = ? and attribute = ?");
insertGroupStmt = prepareStatement("insert or replace into groups (value, attribute) values (?,?)");
updateGroupStmt = prepareStatement("UPDATE groups SET seen = 1 WHERE value = ? AND attribute = ?");
insertGroupStmt = prepareStatement("INSERT OR REPLACE INTO groups (value, attribute) VALUES (?,?)");
groupSeenQueryStmt = prepareStatement("select seen from groups where value = ? and attribute = ?");
groupSeenQueryStmt = prepareStatement("SELECT seen FROM groups WHERE value = ? AND attribute = ?");
selectHashSetNamesStmt = prepareStatement("SELECT DISTINCT hash_set_name FROM hash_sets");
insertHashSetStmt = prepareStatement("insert or ignore into hash_sets (hash_set_name) values (?)");
selectHashSetStmt = prepareStatement("select hash_set_id from hash_sets where hash_set_name = ?");
insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) VALUES (?)");
selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?");
insertHashHitStmt = prepareStatement("insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)");
insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, obj_id) VALUES (?,?)");
} else {
throw new ExceptionInInitializerError();
@ -574,17 +574,17 @@ public class DrawableDB {
if (hashSetNames.isEmpty() == false) {
for (String name : hashSetNames) {
// "insert or ignore into hash_sets (hash_set_name) values (?)"
// "INSERT OR IGNORE INTO hash_sets (hash_set_name) VALUES (?)"
insertHashSetStmt.setString(1, name);
insertHashSetStmt.executeUpdate();
//TODO: use nested select to get hash_set_id rather than seperate statement/query
//"select hash_set_id from hash_sets where hash_set_name = ?"
//"SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {
while (rs.next()) {
int hashsetID = rs.getInt("hash_set_id");
//"insert or ignore into hash_set_hits (hash_set_id, obj_id) values (?,?)";
//"INSERT OR IGNORE INTO hash_set_hits (hash_set_id, obj_id) VALUES (?,?)";
insertHashHitStmt.setInt(1, hashsetID);
insertHashHitStmt.setLong(2, f.getId());
insertHashHitStmt.executeUpdate();
@ -649,7 +649,7 @@ public class DrawableDB {
public Boolean isFileAnalyzed(long fileId) {
dbReadLock();
try (Statement stmt = con.createStatement();
ResultSet analyzedQuery = stmt.executeQuery("select analyzed from drawable_files where obj_id = " + fileId)) {
ResultSet analyzedQuery = stmt.executeQuery("SELECT analyzed FROM drawable_files WHERE obj_id = " + fileId)) {
while (analyzedQuery.next()) {
return analyzedQuery.getBoolean(ANALYZED);
}
@ -667,7 +667,7 @@ public class DrawableDB {
dbReadLock();
try (Statement stmt = con.createStatement();
//Can't make this a prepared statement because of the IN ( ... )
ResultSet analyzedQuery = stmt.executeQuery("select count(analyzed) as analyzed from drawable_files where analyzed = 1 and obj_id in (" + StringUtils.join(fileIds, ", ") + ")")) {
ResultSet analyzedQuery = stmt.executeQuery("SELECT COUNT(analyzed) AS analyzed FROM drawable_files WHERE analyzed = 1 AND obj_id IN (" + StringUtils.join(fileIds, ", ") + ")")) {
while (analyzedQuery.next()) {
return analyzedQuery.getInt(ANALYZED) == fileIds.size();
}
@ -689,7 +689,7 @@ public class DrawableDB {
// In testing, this method appears to be a lot faster than doing one large select statement
for (Long fileID : fileIDsInGroup) {
Statement stmt = con.createStatement();
ResultSet analyzedQuery = stmt.executeQuery("select analyzed from drawable_files where obj_id = " + fileID);
ResultSet analyzedQuery = stmt.executeQuery("SELECT analyzed FROM drawable_files WHERE obj_id = " + fileID);
while (analyzedQuery.next()) {
if (analyzedQuery.getInt(ANALYZED) == 0) {
return false;
@ -705,7 +705,7 @@ public class DrawableDB {
// Old method
try (Statement stmt = con.createStatement();
//Can't make this a prepared statement because of the IN ( ... )
ResultSet analyzedQuery = stmt.executeQuery("select count(analyzed) as analyzed from drawable_files where analyzed = 1 and obj_id in (" + StringUtils.join(fileIDsInGroup, ", ") + ")")) {
ResultSet analyzedQuery = stmt.executeQuery("SELECT count(analyzed) as analyzed from drawable_files where analyzed = 1 and obj_id in (" + StringUtils.join(fileIDsInGroup, ", ") + ")")) {
while (analyzedQuery.next()) {
return analyzedQuery.getInt(ANALYZED) == fileIDsInGroup.size();
}
@ -861,15 +861,15 @@ public class DrawableDB {
default:
dbReadLock();
//TODO: convert this to prepared statement
StringBuilder query = new StringBuilder("select " + groupBy.attrName.toString() + ", count(*) from drawable_files group by " + groupBy.attrName.toString());
StringBuilder query = new StringBuilder("SELECT " + groupBy.attrName.toString() + ", COUNT(*) FROM drawable_files GROUP BY " + groupBy.attrName.toString());
String orderByClause = "";
switch (sortBy) {
case GROUP_BY_VALUE:
orderByClause = " order by " + groupBy.attrName.toString();
orderByClause = " ORDER BY " + groupBy.attrName.toString();
break;
case FILE_COUNT:
orderByClause = " order by count(*)";
orderByClause = " ORDER BY COUNT(*)";
break;
case NONE:
// case PRIORITY:
@ -1056,7 +1056,7 @@ public class DrawableDB {
public int countAllFiles() {
int result = -1;
dbReadLock();
try (ResultSet rs = con.createStatement().executeQuery("select count(*) as COUNT from drawable_files")) {
try (ResultSet rs = con.createStatement().executeQuery("SELECT COUNT(*) AS COUNT FROM drawable_files")) {
while (rs.next()) {
result = rs.getInt("COUNT");

View File

@ -56,11 +56,11 @@ import org.sleuthkit.datamodel.TskData;
class Chrome extends Extract {
private static final String historyQuery = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " //NON-NLS
+ "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) as from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; //NON-NLS
private static final String cookieQuery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies"; //NON-NLS
private static final String downloadQuery = "select full_path, url, start_time, received_bytes from downloads"; //NON-NLS
private static final String downloadQueryVersion30 = "SELECT current_path as full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id"; //NON-NLS
private static final String loginQuery = "select origin_url, username_value, signon_realm from logins"; //NON-NLS
+ "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) AS from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; //NON-NLS
private static final String cookieQuery = "SELECT name, value, host_key, expires_utc,last_access_utc, creation_utc FROM cookies"; //NON-NLS
private static final String downloadQuery = "SELECT full_path, url, start_time, received_bytes FROM downloads"; //NON-NLS
private static final String downloadQueryVersion30 = "SELECT current_path AS full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id"; //NON-NLS
private static final String loginQuery = "SELECT origin_url, username_value, signon_realm from logins"; //NON-NLS
private final Logger logger = Logger.getLogger(this.getClass().getName());
private Content dataSource;
private IngestJobContext context;

View File

@ -53,12 +53,12 @@ import org.sleuthkit.datamodel.TskCoreException;
class Firefox extends Extract {
private static final Logger logger = Logger.getLogger(Firefox.class.getName());
private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; //NON-NLS
private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; //NON-NLS
private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; //NON-NLS
private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) as dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; //NON-NLS
private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads"; //NON-NLS
private static final String downloadQueryVersion24 = "SELECT url, content as target, (lastModified/1000000) as lastModified FROM moz_places, moz_annos WHERE moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3"; //NON-NLS
private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) AS visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; //NON-NLS
private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) AS lastAccessed,(creationTime/1000000) AS creationTime FROM moz_cookies"; //NON-NLS
private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) AS lastAccessed FROM moz_cookies"; //NON-NLS
private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) AS dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; //NON-NLS
private static final String downloadQuery = "SELECT target, source,(startTime/1000000) AS startTime, maxBytes FROM moz_downloads"; //NON-NLS
private static final String downloadQueryVersion24 = "SELECT url, content AS target, (lastModified/1000000) AS lastModified FROM moz_places, moz_annos WHERE moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3"; //NON-NLS
private final IngestServices services = IngestServices.getInstance();
private Content dataSource;
private IngestJobContext context;

View File

@ -279,7 +279,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
for (BlackboardArtifact artifact : listArtifacts) {
if (context.dataSourceIngestIsCancelled()) {
break; //User cancled the process.
break; //User cancelled the process.
}
//initializing default attributes
@ -302,7 +302,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
SearchEngineURLQueryAnalyzer.SearchEngine se = null;
//from blackboard_attributes
Collection<BlackboardAttribute> listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where artifact_id = " + artifact.getArtifactID()); //NON-NLS
Collection<BlackboardAttribute> listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("WHERE artifact_id = " + artifact.getArtifactID()); //NON-NLS
for (BlackboardAttribute attribute : listAttributes) {
if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID()) {

View File

@ -195,7 +195,7 @@ class Util {
int index = parent_path.lastIndexOf('/');
String name = parent_path.substring(++index);
parent_path = parent_path.substring(0, index);
//String query = "select * from tsk_files where parent_path like \"" + parent_path + "\" AND name like \"" + name + "\"";
//String query = "SELECT * from tsk_files where parent_path like \"" + parent_path + "\" AND name like \"" + name + "\"";
FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
List<AbstractFile> files = null;

File diff suppressed because it is too large

166
docs/doxygen/database.dox Normal file
View File

@ -0,0 +1,166 @@
/*! \page database_page Databases
\section types_of_databases Database Options
Autopsy currently allows either \ref sqlite_db or \ref postgresql_db as the back-end database system for a case. Any module you write may be run against either back end, at the user's discretion.
\subsection sqlite_db SQLite
- SQLite 3 or newer
- Used for Single-user cases
- Built-in
- No configuration required
- Databases are stored locally
\subsection postgresql_db PostgreSQL
- PostgreSQL 9.4.1 or newer
- Used for Multi-user cases
- An accessible instance of PostgreSQL must be running (on a server, or locally)
- Proper configuration to connect to this instance of PostgreSQL must be entered in Tools, Options, Multi-User options
- Databases are stored within PostgreSQL, wherever it happens to be running
\subsection which_db Which Database is my Module Accessing?
In an Autopsy module, you can check which database type is currently in use with the following code snippet:
\code{.java}
Case currentCase = Case.getCurrentCase();
if (currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
{
// PostgreSQL in use
}
else
{
// SQLite in use
}
\endcode
<br>
\section db_user_calls TSK methods to Query the Database With User-Supplied SQL
The following SleuthkitCase methods let the caller supply all, or a portion, of a SQL query.
\code{.java}
ArrayList<BlackboardAttribute> getMatchingAttributes(String whereClause)
ArrayList<BlackboardArtifact> getMatchingArtifacts(String whereClause)
long countFilesWhere(String sqlWhereClause)
List<AbstractFile> findAllFilesWhere(String sqlWhereClause)
List<Long> findAllFileIdsWhere(String sqlWhereClause)
CaseDbQuery executeQuery(String query)
List<FsContent> findFilesWhere(String sqlWhereClause) [deprecated]
ResultSet runQuery(String query) [deprecated]
void closeRunQuery(ResultSet resultSet) [deprecated]
\endcode
The majority of them only allow the user to specify a WHERE clause, determining which records to SELECT.
<br>
<br>
The following example finds all the .txt files in the case:
\code{.java}
List<AbstractFile> files = sk.findAllFilesWhere("name LIKE '%.txt'");
\endcode
<br>
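Of the methods above, executeQuery() takes a complete SQL statement and returns a CaseDbQuery that wraps the ResultSet; it is AutoCloseable, so try-with-resources works well. A sketch, modeled on the TimeLineController change in this commit (the variable names and the logging are illustrative):
\code{.java}
String query = "SELECT MAX(artifact_id) AS max_id FROM blackboard_artifacts";
try (SleuthkitCase.CaseDbQuery dbQuery = sleuthkitCase.executeQuery(query)) {
    ResultSet resultSet = dbQuery.getResultSet();
    while (resultSet.next()) {
        long maxId = resultSet.getLong("max_id"); // read the aggregate by the alias given in the query
    }
} catch (TskCoreException | SQLException ex) {
    logger.log(Level.SEVERE, "Error executing query", ex);
}
\endcode
<br>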
\section db_pitfalls_to_avoid How to Avoid Pitfalls When Using the Query Methods
Because there are multiple back-end databases, care must be taken to write strict, portable SQL. When statements must differ between database types, use \ref which_db to determine which database is currently in use and build the proper SQL statement.
- Test your module with both types of databases. They behave differently and can return records in a different order.
<br>
<br>
- Do not use INSERT OR REPLACE INTO. It does not exist in PostgreSQL.
<br>
<br>
- Do not use INSERT OR IGNORE INTO. It does not exist in PostgreSQL.
<br>
<br>
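A portable alternative is to test for the row before inserting. A sketch, reusing the prepared-statement names from this commit's DrawableDB changes (the plain INSERT without OR IGNORE is the assumption here, and check-then-insert is not atomic without a transaction):
\code{.java}
// selectHashSetStmt: "SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?"
// insertHashSetStmt: "INSERT INTO hash_sets (hash_set_name) VALUES (?)"
selectHashSetStmt.setString(1, name);
try (ResultSet rs = selectHashSetStmt.executeQuery()) {
    if (rs.next() == false) { // no existing row, safe to insert
        insertHashSetStmt.setString(1, name);
        insertHashSetStmt.executeUpdate();
    }
}
\endcode
<br>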
- SQLite and PostgreSQL return records in different default orders, so fully specify an ORDER BY clause for both database types. Example:
\code{.java}
String orderByClause;
if (currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
{
orderByClause = "ORDER BY att.value_text, ASC NULLS FIRST"; //PostgreSQL
}
else
{
orderByClause = "ORDER BY att.value_text ASC"; //SQLite
}
\endcode
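Either clause can then be appended when the full query is assembled, e.g. (a sketch; the att alias matches the example above):
\code{.java}
String query = "SELECT att.value_text FROM blackboard_attributes att " + orderByClause;
\endcode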
<br>
- Do not use backticks to quote identifiers. PostgreSQL does not accept them the way SQLite does.
<br>
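For example:
\code{.java}
SELECT * FROM `tsk_files` WHERE has_path = 1 // Bad example
SELECT * FROM tsk_files WHERE has_path = 1 // Good example
\endcode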
<br>
- Use only single quotes to quote values. Do not use double quotes for this. Quoting values is not required.
<br>
\code{.java}
SELECT * FROM tsk_files WHERE has_path = "1" // Bad example
SELECT * FROM tsk_files WHERE has_path = '1' // Good example
SELECT * FROM tsk_files WHERE has_path = 1 // Good example
\endcode
<br>
- Use only double quotes to quote column names. Do not use single quotes for this. Quoting column names is not required.
<br>
\code{.java}
SELECT 'obj_id' FROM tsk_files WHERE has_path = 1 // Bad example
SELECT "obj_id" FROM tsk_files WHERE has_path = 1 // Good example
SELECT obj_id FROM tsk_files WHERE has_path = 1 // Good example
\endcode
<br>
- PostgreSQL comparisons are case-sensitive; in particular, LIKE is case-sensitive in PostgreSQL but not in SQLite. Always specify the type of comparison you want. UPPER(), LOWER(), ILIKE, etc. can help with that.
\code{.java}
SELECT * FROM people WHERE first_name LIKE '%somename%' // Will be case sensitive in PostgreSQL, not in SQLite
SELECT * FROM people WHERE first_name ILIKE '%somename%' // Works in PostgreSQL, does not exist in SQLite
SELECT * FROM people WHERE LOWER(first_name) LIKE LOWER('%somename%') // Not case sensitive in either database
\endcode
<br>
- Do not use || and && to connect logical clauses; they do not work that way in PostgreSQL (|| is its string-concatenation operator). Use AND and OR instead.
\code{.java}
SELECT COUNT(*) FROM tsk_files WHERE dir_type = '5' && md5 IS NULL || size > '0' // Bad Example
SELECT COUNT(*) FROM tsk_files WHERE dir_type = '5' AND md5 IS NULL OR size > '0' // Good Example
\endcode
<br>
- Do not use COLLATE NOCASE to order output. This does not exist in PostgreSQL. Use LOWER() or UPPER() instead.
\code{.java}
ORDER BY tsk_files.dir_type, tsk_files.name COLLATE NOCASE // Bad Example
ORDER BY tsk_files.dir_type, LOWER(tsk_files.name) // Good Example
\endcode
<br>
- Do not insert [NUL characters](http://en.wikipedia.org/wiki/Null_character) into the database as UTF-8 (NUL characters are not NULL fields). Instead, translate NUL characters to the [SUB character](http://en.wikipedia.org/wiki/Substitute_character) with a helper like the following:
\code{.java}
private String replaceNulls(String text);
\endcode
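A minimal sketch of what such a helper might look like (the NUL-to-SUB mapping follows the advice above; this exact implementation is an assumption):
\code{.java}
private String replaceNulls(String text) {
    // Replace each NUL (0x00) with a SUB (0x1A) so the value is safe to store as UTF-8 text
    return text.replace((char) 0x00, (char) 0x1A);
}
\endcode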
<br>
- In ORDER BY clauses, PostgreSQL ignores leading dashes. Given the following data, you will see the following two sort orders for the different databases.
<br>
| Data | PostgreSQL sort order | SQLite sort order|
|:--------:|:------------------------:|:------------------:|
|Alpha | Alpha | -Bravo |
|-Bravo | -Bravo | Alpha |
|Charlie | Charlie | Charlie |
<br>
To force PostgreSQL not to ignore leading dashes, convert strings to SQL_ASCII before sorting by them. This is done with convert_to(), which only exists in PostgreSQL.
<br>
\code{.java}
ORDER BY some_value // Bad example
ORDER BY convert_to(some_value, 'SQL_ASCII') // Good example
\endcode
<br>
With the code above, using SQL_ASCII encoding, the following results are seen:
<br>
| Data | PostgreSQL sort order | SQLite sort order|
|:--------:|:------------------------:|:------------------:|
|Alpha | -Bravo | -Bravo |
|-Bravo | Alpha | Alpha |
|Charlie | Charlie | Charlie |
<br>
<br>
- PostgreSQL sorts NULLs last for ASC and first for DESC; SQLite does the opposite. PostgreSQL lets you control the NULL sort order with NULLS FIRST or NULLS LAST.
\code{.java}
ORDER BY att.value_text ASC // SQLite example, will give different ordering in PostgreSQL
ORDER BY convert_to(att.value_text, 'SQL_ASCII') ASC NULLS FIRST // PostgreSQL example, does not exist in SQLite
\endcode
<br>
*/

5
docs/doxygen/doxygen_docs/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file and api-docs folder
!api-docs
!.gitignore

View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -1,5 +1,5 @@
<hr/>
<p><i>Copyright &#169; 2012-2014 Basis Technology. Generated on: $date<br/>
<p><i>Copyright &#169; 2012-2015 Basis Technology. Generated on: $date<br/>
This work is licensed under a
<a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/us/">Creative Commons Attribution-Share Alike 3.0 United States License</a>.
</i></p>

View File

@ -18,6 +18,7 @@ If you want to write modules, then these pages are for you:
- \subpage mod_content_page
- \subpage mod_result_page
- \subpage adv_dev_page
- \subpage database_page
- \subpage mod_mobile_page
These pages are more detailed if you want to modify Autopsy code instead of writing add-on modules.

View File

@ -8,7 +8,7 @@ The basic idea is that you need an ingest module. We've typically written mobil
The ingest module has a basic flow of
- Query for files using the org.sleuthkit.autopsy.casemodule.services.FileManager service
- Parse them or open them with SQLite (you'll need to bring along a SQLite JAR file)
- Parse them or open them with SQLite (you'll need to bring along a SQLite JAR file) or PostgreSQL
- Create Blackboard Artifacts (see \ref platform_blackboard)
The BlackBoard has standard artifacts for the standard cell phone forensics data types, such as BlackboardArtifact.TSK_CALLLOG.

View File

@ -29,7 +29,7 @@ These are the basic concepts that you should be aware of before writing any type
- <b>Data Source</b>: Data source is the term used in Autopsy to refer to disk images and logical files that are added to a case. Data sources are represented in Autopsy using several types of classes from the org.sleuthkit.datamodel package.
- <b>Case</b>: A case is a container for one or more data sources in Autopsy. A case is represented by a org.sleuthkit.autopsy.casemodule.Case class. Only one case can be open at a time. You can get the current case using org.sleuthkit.autopsy.casemodule.Case.getCurrentCase().
- <b>Central Database</b>: A central SQLite database exists and stores all file metadata and analysis results. Access to this database can be found from the org.sleuthkit.datamodel.SleuthkitCase class, but you'll probably never need to directly interact with it. All modules can query it for information, though many do not need to. For example, file-level ingest modules will be passed in a reference to a specific file to analyze and may never need to directly go to the database for more information.
- <b>Central Database</b>: A central SQLite or PostgreSQL database exists and stores all file metadata and analysis results. Access to this database can be found from the org.sleuthkit.datamodel.SleuthkitCase class, but you'll probably never need to directly interact with it. All modules can query it for information, though many do not need to. For example, file-level ingest modules will be passed in a reference to a specific file to analyze and may never need to directly go to the database for more information.
- <b>Blackboard:</b> The blackboard is how modules communicate back and forth. Modules post their results to the blackboard in the form of artifacts and the UI will display them. See the \ref platform_blackboard section for more details.
- <b>Services and Utilities</b>: There are a lot of convenience services and utilities that are provided to developers. Refer to the \ref mod_dev_other_services section for more details.

View File

@ -14,7 +14,7 @@ The first step in Autopsy work flow is creating a case. This is done in the org.
\section design_image Adding an Image and Running Ingest Modules
After case is created, one or more disk images can be added to the case. There is a wizard to guide that process and it is located in the org.sleuthkit.autopsy.casemodule package. Refer to the package section \ref casemodule_add_image for more details on the wizard. Most developers will not need to touch this code though. An important concept though is that adding an image to a case means that Autopsy uses The Sleuth Kit to enumerate all of the files in the file system and make a database entry for them in the embedded SQLite database that was created for the case. The database will be used for all further analysis.
After case is created, one or more disk images can be added to the case. There is a wizard to guide that process and it is located in the org.sleuthkit.autopsy.casemodule package. Refer to the package section \ref casemodule_add_image for more details on the wizard. Most developers will not need to touch this code though. An important concept though is that adding an image to a case means that Autopsy uses The Sleuth Kit to enumerate all of the files in the file system and make a database entry for them in the embedded SQLite or PostgreSQL database that was created for the case. The database will be used for all further analysis.
After image has been added to the case, the user can select one or more ingest modules to be executed on the image. Ingest modules focus on a specific type of analysis task and run in the background. They either analyze the entire disk image or individual files. The user will see the results from the modules in the result tree and in the ingest inbox.