mirror of
https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-06 21:00:22 +00:00
Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 1110-no-prepop-2
# Conflicts: # ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java
This commit is contained in:
commit
d5bd765c9b
@ -116,6 +116,7 @@ import org.sleuthkit.datamodel.Report;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException;
|
||||
import org.sleuthkit.autopsy.coreutils.StopWatch;
|
||||
|
||||
/**
|
||||
* An Autopsy case. Currently, only one case at a time may be open.
|
||||
@ -707,11 +708,15 @@ public class Case {
|
||||
"Case.exceptionMessage.cannotGetLockToDeleteCase=Cannot delete case because it is open for another user or there is a problem with the coordination service."
|
||||
})
|
||||
public static void deleteCase(CaseMetadata metadata) throws CaseActionException {
|
||||
StopWatch stopWatch = new StopWatch();
|
||||
stopWatch.start();
|
||||
synchronized (caseActionSerializationLock) {
|
||||
if (null != currentCase) {
|
||||
throw new CaseActionException(Bundle.Case_exceptionMessage_cannotDeleteCurrentCase());
|
||||
}
|
||||
}
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to acquire caseActionSerializationLock (Java monitor in Case class) for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
|
||||
/*
|
||||
* Set up either a GUI progress indicator without a cancel button (can't
|
||||
@ -733,10 +738,19 @@ public class Case {
|
||||
* cannot be deleted if another node has it open.
|
||||
*/
|
||||
progressIndicator.progress(Bundle.Case_progressMessage_checkingForOtherUser());
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
try (CoordinationService.Lock dirLock = CoordinationService.getInstance().tryGetExclusiveLock(CategoryNode.CASES, metadata.getCaseDirectory())) {
|
||||
assert (null != dirLock);
|
||||
deleteCase(metadata, progressIndicator);
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to acquire case directory coordination service lock for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
if (dirLock != null) {
|
||||
deleteCase(metadata, progressIndicator);
|
||||
} else {
|
||||
throw new CaseActionException(Bundle.Case_creationException_couldNotAcquireDirLock());
|
||||
}
|
||||
} catch (CoordinationServiceException ex) {
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to fail to acquire case directory coordination service lock for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
throw new CaseActionException(Bundle.Case_exceptionMessage_cannotGetLockToDeleteCase(), ex);
|
||||
}
|
||||
}
|
||||
@ -946,11 +960,13 @@ public class Case {
|
||||
"Case.exceptionMessage.errorsDeletingCase=Errors occured while deleting the case. See the application log for details"
|
||||
})
|
||||
private static void deleteCase(CaseMetadata metadata, ProgressIndicator progressIndicator) throws CaseActionException {
|
||||
StopWatch stopWatch = new StopWatch();
|
||||
boolean errorsOccurred = false;
|
||||
if (CaseType.MULTI_USER_CASE == metadata.getCaseType()) {
|
||||
/*
|
||||
* Delete the case database from the database server.
|
||||
*/
|
||||
stopWatch.start();
|
||||
try {
|
||||
progressIndicator.progress(Bundle.Case_progressMessage_deletingCaseDatabase());
|
||||
CaseDbConnectionInfo db;
|
||||
@ -960,10 +976,14 @@ public class Case {
|
||||
Statement statement = connection.createStatement();) {
|
||||
String deleteCommand = "DROP DATABASE \"" + metadata.getCaseDatabaseName() + "\""; //NON-NLS
|
||||
statement.execute(deleteCommand);
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to delete case database for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
}
|
||||
} catch (UserPreferencesException | ClassNotFoundException | SQLException ex) {
|
||||
logger.log(Level.SEVERE, String.format("Failed to delete case database %s for %s (%s) in %s", metadata.getCaseDatabaseName(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()), ex);
|
||||
errorsOccurred = true;
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to fail delete case database for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -973,10 +993,16 @@ public class Case {
|
||||
progressIndicator.progress(Bundle.Case_progressMessage_deletingTextIndex());
|
||||
for (KeywordSearchService searchService : Lookup.getDefault().lookupAll(KeywordSearchService.class)) {
|
||||
try {
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
searchService.deleteTextIndex(metadata);
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to delete text index for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
} catch (KeywordSearchServiceException ex) {
|
||||
logger.log(Level.SEVERE, String.format("Failed to delete text index for %s (%s) in %s", metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()), ex);
|
||||
errorsOccurred = true;
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to fail to delete text index for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -984,9 +1010,16 @@ public class Case {
|
||||
* Delete the case directory.
|
||||
*/
|
||||
progressIndicator.progress(Bundle.Case_progressMessage_deletingCaseDirectory());
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
if (!FileUtil.deleteDir(new File(metadata.getCaseDirectory()))) {
|
||||
logger.log(Level.SEVERE, String.format("Failed to delete case directory for %s (%s) in %s", metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
errorsOccurred = true;
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to fail to delete case directory for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
} else {
|
||||
stopWatch.stop();
|
||||
logger.log(Level.INFO, String.format("Used %d s to delete case directory for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
|
||||
}
|
||||
|
||||
/*
|
||||
@ -1540,11 +1573,13 @@ public class Case {
|
||||
}
|
||||
|
||||
/**
|
||||
* Notifies case event subscribers that a central repository comment has been changed.
|
||||
* Notifies case event subscribers that a central repository comment has
|
||||
* been changed.
|
||||
*
|
||||
* This should not be called from the event dispatch thread (EDT)
|
||||
*
|
||||
* @param contentId the objectId for the Content which has had its central repo comment changed
|
||||
* @param contentId the objectId for the Content which has had its central
|
||||
* repo comment changed
|
||||
* @param newComment the new value of the comment
|
||||
*/
|
||||
public void notifyCentralRepoCommentChanged(long contentId, String newComment) {
|
||||
@ -1800,7 +1835,7 @@ public class Case {
|
||||
progressIndicator.progress(Bundle.Case_progressMessage_preparingToOpenCaseResources());
|
||||
acquireSharedCaseDirLock(metadata.getCaseDirectory());
|
||||
try (CoordinationService.Lock resourcesLock = acquireExclusiveCaseResourcesLock(metadata.getCaseDirectory())) {
|
||||
assert (null != resourcesLock);
|
||||
assert(resourcesLock != null); // Use reference to avoid compile time warning.
|
||||
open(isNewCase, progressIndicator);
|
||||
} catch (CaseActionException ex) {
|
||||
releaseSharedCaseDirLock(getMetadata().getCaseDirectory());
|
||||
@ -2375,7 +2410,7 @@ public class Case {
|
||||
* @throws CaseActionException with a user-friendly message if the lock
|
||||
* cannot be acquired.
|
||||
*/
|
||||
@Messages({"Case.creationException.couldNotAcquireDirLock=Failed to get lock on case directory."})
|
||||
@Messages({"Case.creationException.couldNotAcquireDirLock=Failed to get lock on case directory"})
|
||||
private void acquireSharedCaseDirLock(String caseDir) throws CaseActionException {
|
||||
try {
|
||||
caseDirLock = CoordinationService.getInstance().tryGetSharedLock(CategoryNode.CASES, caseDir, DIR_LOCK_TIMOUT_HOURS, TimeUnit.HOURS);
|
||||
|
@ -369,7 +369,7 @@ public class LocalFilesDSProcessor implements DataSourceProcessor, AutoIngestDat
|
||||
@Override
|
||||
public void process(String deviceId, Path dataSourcePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) {
|
||||
List<String> filePaths = Arrays.asList(new String[]{dataSourcePath.toString()});
|
||||
run(deviceId, deviceId, filePaths, progressMonitor, callBack);
|
||||
run(deviceId, "", filePaths, progressMonitor, callBack);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -419,7 +419,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi
|
||||
}
|
||||
|
||||
// we can correlate based on the MD5 if it is enabled
|
||||
if (this.file != null && EamDb.isEnabled()) {
|
||||
if (this.file != null && EamDb.isEnabled() && this.file.getSize() > 0) {
|
||||
try {
|
||||
|
||||
List<CorrelationAttributeInstance.Type> artifactTypes = EamDb.getInstance().getDefinedCorrelationTypes();
|
||||
@ -447,27 +447,23 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi
|
||||
} catch (EamDbException | TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
// If EamDb not enabled, get the Files default correlation type to allow Other Occurances to be enabled.
|
||||
if (this.file != null) {
|
||||
String md5 = this.file.getMd5Hash();
|
||||
if (md5 != null && !md5.isEmpty()) {
|
||||
try {
|
||||
final CorrelationAttributeInstance.Type fileAttributeType
|
||||
= CorrelationAttributeInstance.getDefaultCorrelationTypes()
|
||||
.stream()
|
||||
.filter(attrType -> attrType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID)
|
||||
.findAny()
|
||||
.get();
|
||||
} else if (this.file != null && this.file.getSize() > 0) {
|
||||
String md5 = this.file.getMd5Hash();
|
||||
if (md5 != null && !md5.isEmpty()) {
|
||||
try {
|
||||
final CorrelationAttributeInstance.Type fileAttributeType
|
||||
= CorrelationAttributeInstance.getDefaultCorrelationTypes()
|
||||
.stream()
|
||||
.filter(attrType -> attrType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID)
|
||||
.findAny()
|
||||
.get();
|
||||
|
||||
ret.add(new CorrelationAttributeInstance(fileAttributeType, md5));
|
||||
} catch (EamDbException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS
|
||||
} catch (CorrelationAttributeNormalizationException ex) {
|
||||
LOGGER.log(Level.INFO, String.format("Unable to create CorrelationAttributeInstance for value %s", md5), ex); // NON-NLS
|
||||
}
|
||||
ret.add(new CorrelationAttributeInstance(fileAttributeType, md5));
|
||||
} catch (EamDbException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS
|
||||
} catch (CorrelationAttributeNormalizationException ex) {
|
||||
LOGGER.log(Level.INFO, String.format("Unable to create CorrelationAttributeInstance for value %s", md5), ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -515,9 +511,9 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi
|
||||
* artifact. If the central repo is not enabled, this will only return files
|
||||
* from the current case with matching MD5 hashes.
|
||||
*
|
||||
* @param corAttr CorrelationAttribute to query for
|
||||
* @param corAttr CorrelationAttribute to query for
|
||||
* @param dataSourceName Data source to filter results
|
||||
* @param deviceId Device Id to filter results
|
||||
* @param deviceId Device Id to filter results
|
||||
*
|
||||
* @return A collection of correlated artifact instances
|
||||
*/
|
||||
@ -580,7 +576,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi
|
||||
* Get all other abstract files in the current case with the same MD5 as the
|
||||
* selected node.
|
||||
*
|
||||
* @param corAttr The CorrelationAttribute containing the MD5 to search for
|
||||
* @param corAttr The CorrelationAttribute containing the MD5 to search for
|
||||
* @param openCase The current case
|
||||
*
|
||||
* @return List of matching AbstractFile objects
|
||||
@ -657,11 +653,9 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi
|
||||
// - The central repo is disabled and the backing file has a valid MD5 hash
|
||||
this.file = this.getAbstractFileFromNode(node);
|
||||
if (EamDb.isEnabled()) {
|
||||
return this.file != null
|
||||
&& this.file.getSize() > 0
|
||||
&& !getCorrelationAttributesFromNode(node).isEmpty();
|
||||
return !getCorrelationAttributesFromNode(node).isEmpty();
|
||||
} else {
|
||||
return this.file != null
|
||||
return this.file != null
|
||||
&& this.file.getSize() > 0
|
||||
&& ((this.file.getMd5Hash() != null) && (!this.file.getMd5Hash().isEmpty()));
|
||||
}
|
||||
@ -733,8 +727,8 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi
|
||||
* Adjust a given column for the text provided.
|
||||
*
|
||||
* @param columnIndex The index of the column to adjust.
|
||||
* @param text The text whose length will be used to adjust the column
|
||||
* width.
|
||||
* @param text The text whose length will be used to adjust the
|
||||
* column width.
|
||||
*/
|
||||
private void setColumnWidthToText(int columnIndex, String text) {
|
||||
TableColumn column = otherCasesTable.getColumnModel().getColumn(columnIndex);
|
||||
|
@ -2964,7 +2964,6 @@ abstract class AbstractSqlEamDb implements EamDb {
|
||||
resultSet.getString("creation_date"), resultSet.getString("case_number"), resultSet.getString("examiner_name"),
|
||||
resultSet.getString("examiner_email"), resultSet.getString("examiner_phone"), resultSet.getString("notes"));
|
||||
|
||||
|
||||
return eamCase;
|
||||
}
|
||||
|
||||
@ -3080,6 +3079,7 @@ abstract class AbstractSqlEamDb implements EamDb {
|
||||
|
||||
ResultSet resultSet = null;
|
||||
Statement statement = null;
|
||||
PreparedStatement preparedStatement = null;
|
||||
Connection conn = null;
|
||||
try {
|
||||
|
||||
@ -3114,6 +3114,10 @@ abstract class AbstractSqlEamDb implements EamDb {
|
||||
logger.log(Level.INFO, "Central Repository is up to date");
|
||||
return;
|
||||
}
|
||||
if (dbSchemaVersion.compareTo(CURRENT_DB_SCHEMA_VERSION) > 0) {
|
||||
logger.log(Level.INFO, "Central Repository is of newer version than software creates");
|
||||
return;
|
||||
}
|
||||
|
||||
// Update from 1.0 to 1.1
|
||||
if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 1)) < 0) {
|
||||
@ -3126,7 +3130,56 @@ abstract class AbstractSqlEamDb implements EamDb {
|
||||
// regardless of whether this succeeds.
|
||||
EamDbUtil.insertDefaultOrganization(conn);
|
||||
}
|
||||
//Update to 1.2
|
||||
if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 2)) < 0) {
|
||||
//update central repository to be able to store new correlation attributes
|
||||
EamDbPlatformEnum selectedPlatform = EamDbPlatformEnum.getSelectedPlatform();
|
||||
final String addSsidTableTemplate;
|
||||
final String addCaseIdIndexTemplate;
|
||||
final String addDataSourceIdIndexTemplate;
|
||||
final String addValueIndexTemplate;
|
||||
final String addKnownStatusIndexTemplate;
|
||||
final String addAttributeSql;
|
||||
//get the data base specific code for creating a new _instance table
|
||||
switch (selectedPlatform) {
|
||||
case POSTGRESQL:
|
||||
addAttributeSql = "INSERT INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?) " + getConflictClause();
|
||||
addSsidTableTemplate = PostgresEamDbSettings.getCreateArtifactInstancesTableTemplate();
|
||||
addCaseIdIndexTemplate = PostgresEamDbSettings.getAddCaseIdIndexTemplate();
|
||||
addDataSourceIdIndexTemplate = PostgresEamDbSettings.getAddDataSourceIdIndexTemplate();
|
||||
addValueIndexTemplate = PostgresEamDbSettings.getAddValueIndexTemplate();
|
||||
addKnownStatusIndexTemplate = PostgresEamDbSettings.getAddKnownStatusIndexTemplate();
|
||||
break;
|
||||
case SQLITE:
|
||||
addAttributeSql = "INSERT OR IGNORE INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?)";
|
||||
addSsidTableTemplate = SqliteEamDbSettings.getCreateArtifactInstancesTableTemplate();
|
||||
addCaseIdIndexTemplate = SqliteEamDbSettings.getAddCaseIdIndexTemplate();
|
||||
addDataSourceIdIndexTemplate = SqliteEamDbSettings.getAddDataSourceIdIndexTemplate();
|
||||
addValueIndexTemplate = SqliteEamDbSettings.getAddValueIndexTemplate();
|
||||
addKnownStatusIndexTemplate = SqliteEamDbSettings.getAddKnownStatusIndexTemplate();
|
||||
break;
|
||||
default:
|
||||
throw new EamDbException("Currently selected database platform \"" + selectedPlatform.name() + "\" can not be upgraded.");
|
||||
}
|
||||
final String wirelessNetworsDbTableName = "wireless_networks";
|
||||
final String wirelessNetworksTableInstanceName = wirelessNetworsDbTableName + "_instances";
|
||||
//add the wireless_networks attribute to the correlation_types table
|
||||
preparedStatement = conn.prepareStatement(addAttributeSql);
|
||||
preparedStatement.setInt(1, CorrelationAttributeInstance.SSID_TYPE_ID);
|
||||
preparedStatement.setString(2, Bundle.CorrelationType_SSID_displayName());
|
||||
preparedStatement.setString(3, wirelessNetworsDbTableName);
|
||||
preparedStatement.setInt(4, 1);
|
||||
preparedStatement.setInt(5, 1);
|
||||
preparedStatement.execute();
|
||||
|
||||
//create a new wireless_networks_instances table and add indexes for its columns
|
||||
statement.execute(String.format(addSsidTableTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
|
||||
statement.execute(String.format(addCaseIdIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
|
||||
statement.execute(String.format(addDataSourceIdIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
|
||||
statement.execute(String.format(addValueIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
|
||||
statement.execute(String.format(addKnownStatusIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
|
||||
|
||||
}
|
||||
if (!updateSchemaVersion(conn)) {
|
||||
throw new EamDbException("Error updating schema version");
|
||||
}
|
||||
@ -3144,6 +3197,7 @@ abstract class AbstractSqlEamDb implements EamDb {
|
||||
throw ex;
|
||||
} finally {
|
||||
EamDbUtil.closeResultSet(resultSet);
|
||||
EamDbUtil.closeStatement(preparedStatement);
|
||||
EamDbUtil.closeStatement(statement);
|
||||
EamDbUtil.closeConnection(conn);
|
||||
}
|
||||
|
@ -171,7 +171,7 @@ public class CorrelationAttributeInstance implements Serializable {
|
||||
* Is this a database instance?
|
||||
*
|
||||
* @return True if the instance ID is greater or equal to zero; otherwise
|
||||
* false.
|
||||
* false.
|
||||
*/
|
||||
public boolean isDatabaseInstance() {
|
||||
return (ID >= 0);
|
||||
@ -234,7 +234,7 @@ public class CorrelationAttributeInstance implements Serializable {
|
||||
* as notable and should never be set to KNOWN.
|
||||
*
|
||||
* @param knownStatus Should be BAD if the item is tagged as notable,
|
||||
* UNKNOWN otherwise
|
||||
* UNKNOWN otherwise
|
||||
*/
|
||||
public void setKnownStatus(TskData.FileKnown knownStatus) {
|
||||
this.knownStatus = knownStatus;
|
||||
@ -246,18 +246,20 @@ public class CorrelationAttributeInstance implements Serializable {
|
||||
public static final int EMAIL_TYPE_ID = 2;
|
||||
public static final int PHONE_TYPE_ID = 3;
|
||||
public static final int USBID_TYPE_ID = 4;
|
||||
public static final int SSID_TYPE_ID = 5;
|
||||
|
||||
/**
|
||||
* Load the default correlation types
|
||||
*
|
||||
* @throws EamDbException if the Type's dbTableName has invalid
|
||||
* characters/format
|
||||
* characters/format
|
||||
*/
|
||||
@Messages({"CorrelationType.FILES.displayName=Files",
|
||||
"CorrelationType.DOMAIN.displayName=Domains",
|
||||
"CorrelationType.EMAIL.displayName=Email Addresses",
|
||||
"CorrelationType.PHONE.displayName=Phone Numbers",
|
||||
"CorrelationType.USBID.displayName=USB Devices"})
|
||||
"CorrelationType.USBID.displayName=USB Devices",
|
||||
"CorrelationType.SSID.displayName=Wireless Networks"})
|
||||
public static List<CorrelationAttributeInstance.Type> getDefaultCorrelationTypes() throws EamDbException {
|
||||
List<CorrelationAttributeInstance.Type> DEFAULT_CORRELATION_TYPES = new ArrayList<>();
|
||||
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(FILES_TYPE_ID, Bundle.CorrelationType_FILES_displayName(), "file", true, true)); // NON-NLS
|
||||
@ -265,6 +267,7 @@ public class CorrelationAttributeInstance implements Serializable {
|
||||
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(EMAIL_TYPE_ID, Bundle.CorrelationType_EMAIL_displayName(), "email_address", true, true)); // NON-NLS
|
||||
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(PHONE_TYPE_ID, Bundle.CorrelationType_PHONE_displayName(), "phone_number", true, true)); // NON-NLS
|
||||
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(USBID_TYPE_ID, Bundle.CorrelationType_USBID_displayName(), "usb_devices", true, true)); // NON-NLS
|
||||
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(SSID_TYPE_ID, Bundle.CorrelationType_SSID_displayName(), "wireless_networks", true, true)); // NON-NLS
|
||||
return DEFAULT_CORRELATION_TYPES;
|
||||
}
|
||||
|
||||
@ -283,13 +286,14 @@ public class CorrelationAttributeInstance implements Serializable {
|
||||
|
||||
/**
|
||||
*
|
||||
* @param typeId Unique ID for this Correlation Type
|
||||
* @param typeId Unique ID for this Correlation Type
|
||||
* @param displayName Name of this type displayed in the UI.
|
||||
* @param dbTableName Central repository db table where data of this
|
||||
* type is stored. Must start with a lowercase letter and only contain
|
||||
* lowercase letters, numbers, and '_' characters.
|
||||
* @param supported Is this Type currently supported
|
||||
* @param enabled Is this Type currently enabled.
|
||||
* type is stored. Must start with a lowercase letter
|
||||
* and only contain lowercase letters, numbers, and
|
||||
* '_' characters.
|
||||
* @param supported Is this Type currently supported
|
||||
* @param enabled Is this Type currently enabled.
|
||||
*/
|
||||
public Type(int typeId, String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException {
|
||||
if (dbTableName == null) {
|
||||
@ -312,10 +316,11 @@ public class CorrelationAttributeInstance implements Serializable {
|
||||
*
|
||||
* @param displayName Name of this type displayed in the UI.
|
||||
* @param dbTableName Central repository db table where data of this
|
||||
* type is stored Must start with a lowercase letter and only contain
|
||||
* lowercase letters, numbers, and '_' characters.
|
||||
* @param supported Is this Type currently supported
|
||||
* @param enabled Is this Type currently enabled.
|
||||
* type is stored Must start with a lowercase letter
|
||||
* and only contain lowercase letters, numbers, and
|
||||
* '_' characters.
|
||||
* @param supported Is this Type currently supported
|
||||
* @param enabled Is this Type currently enabled.
|
||||
*/
|
||||
public Type(String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException {
|
||||
this(-1, displayName, dbTableName, supported, enabled);
|
||||
@ -477,8 +482,8 @@ public class CorrelationAttributeInstance implements Serializable {
|
||||
* custom_instances)
|
||||
*
|
||||
* @param dbTableName the dbTableName to set. Must start with lowercase
|
||||
* letter and can only contain lowercase letters, numbers, and '_'
|
||||
* characters.
|
||||
* letter and can only contain lowercase letters,
|
||||
* numbers, and '_' characters.
|
||||
*
|
||||
* @throws EamDbException if dbTableName contains invalid characters
|
||||
*/
|
||||
|
@ -63,6 +63,8 @@ final public class CorrelationAttributeNormalizer {
|
||||
return normalizePhone(data);
|
||||
case CorrelationAttributeInstance.USBID_TYPE_ID:
|
||||
return normalizeUsbId(data);
|
||||
case CorrelationAttributeInstance.SSID_TYPE_ID:
|
||||
return data;
|
||||
default:
|
||||
final String errorMessage = String.format(
|
||||
"Validator function not found for attribute type: %s",
|
||||
|
@ -54,9 +54,9 @@ public class EamArtifactUtil {
|
||||
* EamArtifact with a single EamArtifactInstance within. If not, return
|
||||
* null.
|
||||
*
|
||||
* @param bbArtifact BlackboardArtifact to examine
|
||||
* @param bbArtifact BlackboardArtifact to examine
|
||||
* @param checkEnabled If true, only create a CorrelationAttribute if it is
|
||||
* enabled
|
||||
* enabled
|
||||
*
|
||||
* @return List of EamArtifacts
|
||||
*/
|
||||
@ -93,10 +93,10 @@ public class EamArtifactUtil {
|
||||
* based on the data in the blackboard artifact.
|
||||
*
|
||||
* @param correlationType The Central Repository artifact type to create
|
||||
* @param bbArtifact The blackboard artifact to pull data from
|
||||
* @param bbArtifact The blackboard artifact to pull data from
|
||||
*
|
||||
* @return the new EamArtifact, or null if one was not created because
|
||||
* bbArtifact did not contain the needed data
|
||||
* bbArtifact did not contain the needed data
|
||||
*/
|
||||
private static CorrelationAttributeInstance makeInstanceFromBlackboardArtifact(CorrelationAttributeInstance.Type correlationType,
|
||||
BlackboardArtifact bbArtifact) throws EamDbException {
|
||||
@ -159,13 +159,14 @@ public class EamArtifactUtil {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
} else if (correlationType.getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|
||||
&& BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID() == artifactTypeID) {
|
||||
|
||||
value = bbArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_ID)).getValueString();
|
||||
} else if (correlationType.getId() == CorrelationAttributeInstance.SSID_TYPE_ID
|
||||
&& BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK.getTypeID() == artifactTypeID) {
|
||||
value = bbArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SSID)).getValueString();
|
||||
}
|
||||
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error getting attribute while getting type from BlackboardArtifact.", ex); // NON-NLS
|
||||
return null;
|
||||
@ -185,9 +186,10 @@ public class EamArtifactUtil {
|
||||
* Uses the determined type and vallue, then looks up instance details to
|
||||
* create proper CorrelationAttributeInstance.
|
||||
*
|
||||
* @param bbArtifact the blackboard artifatc
|
||||
* @param bbArtifact the blackboard artifact
|
||||
* @param correlationType the given type
|
||||
* @param value the artifact value
|
||||
* @param value the artifact value
|
||||
*
|
||||
* @return CorrelationAttributeInstance from details
|
||||
*/
|
||||
private static CorrelationAttributeInstance makeCorrelationAttributeInstanceUsingTypeValue(BlackboardArtifact bbArtifact, CorrelationAttributeInstance.Type correlationType, String value) {
|
||||
@ -340,7 +342,7 @@ public class EamArtifactUtil {
|
||||
* @param file The file to test
|
||||
*
|
||||
* @return true if the file should be added to the central repo, false
|
||||
* otherwise
|
||||
* otherwise
|
||||
*/
|
||||
public static boolean isSupportedAbstractFileType(AbstractFile file) {
|
||||
if (file == null) {
|
||||
|
@ -31,9 +31,9 @@ import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber;
|
||||
*/
|
||||
public interface EamDb {
|
||||
|
||||
public static final int SCHEMA_VERSION = 1;
|
||||
public static final int SCHEMA_VERSION = 2;
|
||||
public static final CaseDbSchemaVersionNumber CURRENT_DB_SCHEMA_VERSION
|
||||
= new CaseDbSchemaVersionNumber(1, 1);
|
||||
= new CaseDbSchemaVersionNumber(1, 2);
|
||||
|
||||
|
||||
/**
|
||||
|
@ -36,7 +36,8 @@ import org.sleuthkit.autopsy.coreutils.TextConverterException;
|
||||
/**
|
||||
* Settings for the Postgres implementation of the Central Repository database
|
||||
*
|
||||
* NOTE: This is public scope because the options panel calls it directly to set/get
|
||||
* NOTE: This is public scope because the options panel calls it directly to
|
||||
* set/get
|
||||
*/
|
||||
public final class PostgresEamDbSettings {
|
||||
|
||||
@ -391,26 +392,13 @@ public final class PostgresEamDbSettings {
|
||||
createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)");
|
||||
createCorrelationTypesTable.append(")");
|
||||
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
|
||||
createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
|
||||
createArtifactInstancesTableTemplate.append("id SERIAL PRIMARY KEY,");
|
||||
createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("value text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("comment text,");
|
||||
createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
|
||||
createArtifactInstancesTableTemplate.append(")");
|
||||
String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate();
|
||||
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
String instancesIdx1 = "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
|
||||
String instancesIdx2 = "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
|
||||
String instancesIdx3 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
|
||||
String instancesIdx4 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
|
||||
String instancesIdx1 = getAddCaseIdIndexTemplate();
|
||||
String instancesIdx2 = getAddDataSourceIdIndexTemplate();
|
||||
|
||||
String instancesIdx3 = getAddValueIndexTemplate();
|
||||
String instancesIdx4 = getAddKnownStatusIndexTemplate();
|
||||
|
||||
StringBuilder createDbInfoTable = new StringBuilder();
|
||||
createDbInfoTable.append("CREATE TABLE IF NOT EXISTS db_info (");
|
||||
@ -454,7 +442,7 @@ public final class PostgresEamDbSettings {
|
||||
reference_type_dbname = EamDbUtil.correlationTypeToReferenceTableName(type);
|
||||
instance_type_dbname = EamDbUtil.correlationTypeToInstanceTableName(type);
|
||||
|
||||
stmt.execute(String.format(createArtifactInstancesTableTemplate.toString(), instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(instancesIdx1, instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(instancesIdx2, instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(instancesIdx3, instance_type_dbname, instance_type_dbname));
|
||||
@ -480,6 +468,83 @@ public final class PostgresEamDbSettings {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template String for creating a new _instances table in a Postgres
|
||||
* central repository. %s will exist in the template where the name of the
|
||||
* new table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for cretating a new _instances table
|
||||
*/
|
||||
static String getCreateArtifactInstancesTableTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
|
||||
createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
|
||||
createArtifactInstancesTableTemplate.append("id SERIAL PRIMARY KEY,");
|
||||
createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("value text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("comment text,");
|
||||
createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
|
||||
createArtifactInstancesTableTemplate.append(")");
|
||||
return createArtifactInstancesTableTemplate.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the case_id column of an
|
||||
* instance table. %s will exist in the template where the name of the new
|
||||
* table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the case_id
|
||||
* column of a _instances table
|
||||
*/
|
||||
static String getAddCaseIdIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the data_source_id column of an
|
||||
* instance table. %s will exist in the template where the name of the new
|
||||
* table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the
|
||||
* data_source_id column of a _instances table
|
||||
*/
|
||||
static String getAddDataSourceIdIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the value column of an instance
|
||||
* table. %s will exist in the template where the name of the new table will
|
||||
* be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the value
|
||||
* column of a _instances table
|
||||
*/
|
||||
static String getAddValueIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the known_status column of an
|
||||
* instance table. %s will exist in the template where the name of the new
|
||||
* table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the
|
||||
* known_status column of a _instances table
|
||||
*/
|
||||
static String getAddKnownStatusIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
|
||||
}
|
||||
|
||||
public boolean insertDefaultDatabaseContent() {
|
||||
Connection conn = getEphemeralConnection(false);
|
||||
if (null == conn) {
|
||||
|
@ -36,7 +36,8 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil;
|
||||
/**
|
||||
* Settings for the sqlite implementation of the Central Repository database
|
||||
*
|
||||
* NOTE: This is public scope because the options panel calls it directly to set/get
|
||||
* NOTE: This is public scope because the options panel calls it directly to
|
||||
* set/get
|
||||
*/
|
||||
public final class SqliteEamDbSettings {
|
||||
|
||||
@ -103,11 +104,11 @@ public final class SqliteEamDbSettings {
|
||||
*/
|
||||
public boolean dbFileExists() {
|
||||
File dbFile = new File(getFileNameWithPath());
|
||||
if(! dbFile.exists()){
|
||||
if (!dbFile.exists()) {
|
||||
return false;
|
||||
}
|
||||
// It's unlikely, but make sure the file isn't actually a directory
|
||||
return ( ! dbFile.isDirectory());
|
||||
return (!dbFile.isDirectory());
|
||||
}
|
||||
|
||||
/**
|
||||
@ -151,6 +152,7 @@ public final class SqliteEamDbSettings {
|
||||
|
||||
/**
|
||||
* Delete the database
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean deleteDatabase() {
|
||||
@ -333,26 +335,13 @@ public final class SqliteEamDbSettings {
|
||||
createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)");
|
||||
createCorrelationTypesTable.append(")");
|
||||
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
|
||||
createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
|
||||
createArtifactInstancesTableTemplate.append("id integer primary key autoincrement NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("value text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("comment text,");
|
||||
createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
|
||||
createArtifactInstancesTableTemplate.append(")");
|
||||
String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate();
|
||||
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
String instancesIdx1 = "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
|
||||
String instancesIdx2 = "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
|
||||
String instancesIdx3 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
|
||||
String instancesIdx4 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
|
||||
String instancesIdx1 = getAddCaseIdIndexTemplate();
|
||||
String instancesIdx2 = getAddDataSourceIdIndexTemplate();
|
||||
|
||||
String instancesIdx3 = getAddValueIndexTemplate();
|
||||
String instancesIdx4 = getAddKnownStatusIndexTemplate();
|
||||
|
||||
StringBuilder createDbInfoTable = new StringBuilder();
|
||||
createDbInfoTable.append("CREATE TABLE IF NOT EXISTS db_info (");
|
||||
@ -402,7 +391,7 @@ public final class SqliteEamDbSettings {
|
||||
reference_type_dbname = EamDbUtil.correlationTypeToReferenceTableName(type);
|
||||
instance_type_dbname = EamDbUtil.correlationTypeToInstanceTableName(type);
|
||||
|
||||
stmt.execute(String.format(createArtifactInstancesTableTemplate.toString(), instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(instancesIdx1, instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(instancesIdx2, instance_type_dbname, instance_type_dbname));
|
||||
stmt.execute(String.format(instancesIdx3, instance_type_dbname, instance_type_dbname));
|
||||
@ -427,6 +416,83 @@ public final class SqliteEamDbSettings {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template String for creating a new _instances table in a Sqlite
|
||||
* central repository. %s will exist in the template where the name of the
|
||||
* new table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for cretating a new _instances table
|
||||
*/
|
||||
static String getCreateArtifactInstancesTableTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
|
||||
createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
|
||||
createArtifactInstancesTableTemplate.append("id integer primary key autoincrement NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("value text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
|
||||
createArtifactInstancesTableTemplate.append("comment text,");
|
||||
createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
|
||||
createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
|
||||
createArtifactInstancesTableTemplate.append(")");
|
||||
return createArtifactInstancesTableTemplate.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the case_id column of an
|
||||
* instance table. %s will exist in the template where the name of the new
|
||||
* table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the case_id
|
||||
* column of a _instances table
|
||||
*/
|
||||
static String getAddCaseIdIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the data_source_id column of an
|
||||
* instance table. %s will exist in the template where the name of the new
|
||||
* table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the
|
||||
* data_source_id column of a _instances table
|
||||
*/
|
||||
static String getAddDataSourceIdIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the value column of an instance
|
||||
* table. %s will exist in the template where the name of the new table will
|
||||
* be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the value
|
||||
* column of a _instances table
|
||||
*/
|
||||
static String getAddValueIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the template for creating an index on the known_status column of an
|
||||
* instance table. %s will exist in the template where the name of the new
|
||||
* table will be addedd.
|
||||
*
|
||||
* @return a String which is a template for adding an index to the
|
||||
* known_status column of a _instances table
|
||||
*/
|
||||
static String getAddKnownStatusIndexTemplate() {
|
||||
// Each "%s" will be replaced with the relevant TYPE_instances table name.
|
||||
return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
|
||||
}
|
||||
|
||||
public boolean insertDefaultDatabaseContent() {
|
||||
Connection conn = getEphemeralConnection();
|
||||
if (null == conn) {
|
||||
@ -490,8 +556,6 @@ public final class SqliteEamDbSettings {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @return the dbDirectory
|
||||
*/
|
||||
|
@ -183,7 +183,10 @@ public class IngestEventsListener {
|
||||
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
if (getCeModuleInstanceCount() > 0) {
|
||||
//if ingest is running we want there to check if there is a Correlation Engine module running
|
||||
//sometimes artifacts are generated by DSPs or other sources while ingest is not running
|
||||
//in these cases we still want to create correlation attributes for those artifacts when appropriate
|
||||
if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) {
|
||||
EamDb dbManager;
|
||||
try {
|
||||
dbManager = EamDb.getInstance();
|
||||
@ -193,7 +196,9 @@ public class IngestEventsListener {
|
||||
}
|
||||
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
|
||||
case DATA_ADDED: {
|
||||
jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, isFlagNotableItems()));
|
||||
//if ingest isn't running create the interesting items otherwise use the ingest module setting to determine if we create interesting items
|
||||
boolean flagNotable = !IngestManager.getInstance().isIngestRunning() || isFlagNotableItems();
|
||||
jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, flagNotable));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -71,7 +71,6 @@ final class IngestModule implements FileIngestModule {
|
||||
private CorrelationDataSource eamDataSource;
|
||||
private Blackboard blackboard;
|
||||
private CorrelationAttributeInstance.Type filesType;
|
||||
|
||||
private final boolean flagTaggedNotableItems;
|
||||
|
||||
/**
|
||||
|
@ -34,7 +34,7 @@ AddNewOrganizationDialog.bnOK.text=OK
|
||||
AddNewOrganizationDialog.tfName.tooltip=POC Name
|
||||
ManageTagsDialog.okButton.text=OK
|
||||
ManageTagsDialog.cancelButton.text=Cancel
|
||||
ManageArtifactTypesDialog.taInstructionsMsg.text=Enable one or more correlation properties to use for correlation during ingest. Note, these properties are global and impact all users of the central repository.
|
||||
ManageArtifactTypesDialog.taInstructionsMsg.text=Enable one or more correlation properties to use for correlation during ingest. Note, these properties are global and impact all users of the Central Repository.
|
||||
EamSqliteSettingsDialog.bnOk.text=OK
|
||||
EamPostgresSettingsDialog.bnSave.text=Save
|
||||
EamDbSettingsDialog.bnDatabasePathFileOpen.text=Browse...
|
||||
@ -58,11 +58,10 @@ ManageCorrelationPropertiesDialog.okButton.text=OK
|
||||
GlobalSettingsPanel.bnManageProperties.text=Manage Correlation Properties
|
||||
EamDbSettingsDialog.lbDatabaseDesc.text=Database File:
|
||||
EamDbSettingsDialog.lbFullDbPath.text=
|
||||
GlobalSettingsPanel.cbUseCentralRepo.text=Use a central repository
|
||||
GlobalSettingsPanel.correlationPropertiesTextArea.text=Choose which file and result properties to store in the central repository for later correlation.\n
|
||||
GlobalSettingsPanel.organizationTextArea.text=Organization information can be tracked in the central repository.
|
||||
GlobalSettingsPanel.cbUseCentralRepo.text=Use a Central Repository
|
||||
GlobalSettingsPanel.organizationTextArea.text=Organization information can be tracked in the Central Repository.
|
||||
GlobalSettingsPanel.manageOrganizationButton.text=Manage Organizations
|
||||
GlobalSettingsPanel.lbCentralRepository.text=A central repository allows you to correlate files and results between cases.
|
||||
GlobalSettingsPanel.lbCentralRepository.text=A Central Repository allows you to correlate files and results between cases.
|
||||
GlobalSettingsPanel.pnCorrelationProperties.border.title=Correlation Properties
|
||||
GlobalSettingsPanel.organizationPanel.border.title=Organizations
|
||||
GlobalSettingsPanel.casesPanel.border.title=Case Details
|
||||
@ -74,8 +73,9 @@ ShowCasesDialog.caseDetailsTable.toolTipText=Click column name to sort. Right-cl
|
||||
ShowCasesDialog.title=Case Details
|
||||
GlobalSettingsPanel.Case\ Details.AccessibleContext.accessibleName=Cases Details
|
||||
ShowCasesDialog.caseDetailsTable.AccessibleContext.accessibleDescription=Click column name to sort.
|
||||
GlobalSettingsPanel.casesTextArea.text=Display table that lists central repository case details.
|
||||
GlobalSettingsPanel.ingestRunningWarningLabel.text=Cannot make changes to central repository settings when ingest is running!
|
||||
GlobalSettingsPanel.casesTextArea.text=Display table that lists Central Repository case details.
|
||||
GlobalSettingsPanel.ingestRunningWarningLabel.text=Cannot make changes to Central Repository settings when ingest is running!
|
||||
GlobalSettingsPanel.correlationPropertiesTextArea.text=Choose which file and result properties to store in the Central Repository for later correlation.\n
|
||||
ManageCasesDialog.examinerPhoneLabel.text=Examiner Phone:
|
||||
ManageCasesDialog.examinerNameLabel.text=Examiner Name:
|
||||
ManageCasesDialog.examinerEmailLabel.text=Examiner Email:
|
||||
|
@ -102,7 +102,7 @@ public class EamDbSettingsDialog extends JDialog {
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Directories and central repository databases";
|
||||
return "Directories and Central Repository databases";
|
||||
}
|
||||
});
|
||||
cbDatabaseType.setSelectedItem(selectedPlatform);
|
||||
|
@ -57,7 +57,7 @@
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="lbCentralRepository" pref="1022" max="32767" attributes="0"/>
|
||||
<Component id="lbCentralRepository" max="32767" attributes="0"/>
|
||||
<Group type="102" attributes="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="pnDatabaseConfiguration" alignment="0" max="32767" attributes="0"/>
|
||||
@ -67,7 +67,7 @@
|
||||
<Group type="102" attributes="0">
|
||||
<Component id="cbUseCentralRepo" min="-2" pref="162" max="-2" attributes="0"/>
|
||||
<EmptySpace type="unrelated" max="-2" attributes="0"/>
|
||||
<Component id="ingestRunningWarningLabel" max="32767" attributes="0"/>
|
||||
<Component id="ingestRunningWarningLabel" pref="844" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
@ -89,13 +89,13 @@
|
||||
</Group>
|
||||
<EmptySpace type="unrelated" max="-2" attributes="0"/>
|
||||
<Component id="pnDatabaseConfiguration" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
<Component id="pnCorrelationProperties" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
<Component id="organizationPanel" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
<Component id="casesPanel" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
<Component id="tbOops" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
</Group>
|
||||
@ -242,14 +242,14 @@
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace min="-2" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="correlationPropertiesScrollPane" max="32767" attributes="0"/>
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<Component id="bnManageTypes" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<Component id="correlationPropertiesScrollPane" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
</Group>
|
||||
@ -258,8 +258,7 @@
|
||||
<DimensionLayout dim="1">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="1" attributes="0">
|
||||
<EmptySpace min="-2" pref="7" max="-2" attributes="0"/>
|
||||
<Component id="correlationPropertiesScrollPane" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="correlationPropertiesScrollPane" pref="32" max="32767" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="bnManageTypes" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" pref="8" max="-2" attributes="0"/>
|
||||
@ -301,7 +300,7 @@
|
||||
<Font name="Tahoma" size="11" style="0"/>
|
||||
</Property>
|
||||
<Property name="lineWrap" type="boolean" value="true"/>
|
||||
<Property name="rows" type="int" value="2"/>
|
||||
<Property name="rows" type="int" value="1"/>
|
||||
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
|
||||
<ResourceString bundle="org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties" key="GlobalSettingsPanel.correlationPropertiesTextArea.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
|
||||
</Property>
|
||||
@ -334,7 +333,7 @@
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="organizationScrollPane" pref="992" max="32767" attributes="0"/>
|
||||
<Component id="organizationScrollPane" max="32767" attributes="0"/>
|
||||
<Group type="102" attributes="0">
|
||||
<Component id="manageOrganizationButton" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="32767" attributes="0"/>
|
||||
|
@ -222,7 +222,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i
|
||||
correlationPropertiesTextArea.setColumns(20);
|
||||
correlationPropertiesTextArea.setFont(new java.awt.Font("Tahoma", 0, 11)); // NOI18N
|
||||
correlationPropertiesTextArea.setLineWrap(true);
|
||||
correlationPropertiesTextArea.setRows(2);
|
||||
correlationPropertiesTextArea.setRows(1);
|
||||
correlationPropertiesTextArea.setText(org.openide.util.NbBundle.getMessage(GlobalSettingsPanel.class, "GlobalSettingsPanel.correlationPropertiesTextArea.text")); // NOI18N
|
||||
correlationPropertiesTextArea.setToolTipText("");
|
||||
correlationPropertiesTextArea.setWrapStyleWord(true);
|
||||
@ -236,17 +236,16 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i
|
||||
.addGroup(pnCorrelationPropertiesLayout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(pnCorrelationPropertiesLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(correlationPropertiesScrollPane)
|
||||
.addGroup(pnCorrelationPropertiesLayout.createSequentialGroup()
|
||||
.addComponent(bnManageTypes)
|
||||
.addGap(0, 0, Short.MAX_VALUE)))
|
||||
.addGap(0, 0, Short.MAX_VALUE))
|
||||
.addComponent(correlationPropertiesScrollPane))
|
||||
.addContainerGap())
|
||||
);
|
||||
pnCorrelationPropertiesLayout.setVerticalGroup(
|
||||
pnCorrelationPropertiesLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, pnCorrelationPropertiesLayout.createSequentialGroup()
|
||||
.addGap(7, 7, 7)
|
||||
.addComponent(correlationPropertiesScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(correlationPropertiesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 32, Short.MAX_VALUE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addComponent(bnManageTypes)
|
||||
.addGap(8, 8, 8))
|
||||
@ -281,7 +280,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i
|
||||
.addGroup(organizationPanelLayout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(organizationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(organizationScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 992, Short.MAX_VALUE)
|
||||
.addComponent(organizationScrollPane)
|
||||
.addGroup(organizationPanelLayout.createSequentialGroup()
|
||||
.addComponent(manageOrganizationButton)
|
||||
.addGap(0, 0, Short.MAX_VALUE)))
|
||||
@ -356,7 +355,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i
|
||||
jPanel1.setLayout(jPanel1Layout);
|
||||
jPanel1Layout.setHorizontalGroup(
|
||||
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(lbCentralRepository, javax.swing.GroupLayout.DEFAULT_SIZE, 1022, Short.MAX_VALUE)
|
||||
.addComponent(lbCentralRepository, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(pnDatabaseConfiguration, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
@ -366,7 +365,7 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addComponent(cbUseCentralRepo, javax.swing.GroupLayout.PREFERRED_SIZE, 162, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
|
||||
.addComponent(ingestRunningWarningLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
|
||||
.addComponent(ingestRunningWarningLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 844, Short.MAX_VALUE))
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addComponent(tbOops, javax.swing.GroupLayout.PREFERRED_SIZE, 974, javax.swing.GroupLayout.PREFERRED_SIZE)))
|
||||
@ -382,13 +381,13 @@ public final class GlobalSettingsPanel extends IngestModuleGlobalSettingsPanel i
|
||||
.addComponent(ingestRunningWarningLabel))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
|
||||
.addComponent(pnDatabaseConfiguration, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGap(0, 0, 0)
|
||||
.addComponent(pnCorrelationProperties, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGap(0, 0, 0)
|
||||
.addComponent(organizationPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGap(0, 0, 0)
|
||||
.addComponent(casesPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGap(0, 0, 0)
|
||||
.addComponent(tbOops, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addContainerGap())
|
||||
);
|
||||
|
@ -74,7 +74,7 @@ class PListViewer extends javax.swing.JPanel implements FileTypeViewer, Explorer
|
||||
private final Outline outline;
|
||||
private ExplorerManager explorerManager;
|
||||
|
||||
private NSDictionary rootDict;
|
||||
private NSObject rootDict;
|
||||
|
||||
/**
|
||||
* Creates new form PListViewer
|
||||
@ -415,22 +415,35 @@ class PListViewer extends javax.swing.JPanel implements FileTypeViewer, Explorer
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses given binary stream and extracts Plist key/value
|
||||
* Parses given binary stream and extracts Plist key/value.
|
||||
*
|
||||
* @param plistbytes
|
||||
* @param plistbytes The byte array containing the Plist data.
|
||||
*
|
||||
* @return list of PropKeyValue
|
||||
*/
|
||||
private List<PropKeyValue> parsePList(final byte[] plistbytes) throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException {
|
||||
|
||||
final List<PropKeyValue> plist = new ArrayList<>();
|
||||
rootDict = (NSDictionary) PropertyListParser.parse(plistbytes);
|
||||
rootDict = PropertyListParser.parse(plistbytes);
|
||||
|
||||
final String[] keys = rootDict.allKeys();
|
||||
for (final String key : keys) {
|
||||
final PropKeyValue pkv = parseProperty(key, rootDict.objectForKey(key));
|
||||
if (null != pkv) {
|
||||
plist.add(pkv);
|
||||
/*
|
||||
* Parse the data if the root is an NSArray or NSDictionary. Anything
|
||||
* else is unexpected and will be ignored.
|
||||
*/
|
||||
if (rootDict instanceof NSArray) {
|
||||
for (int i=0; i < ((NSArray)rootDict).count(); i++) {
|
||||
final PropKeyValue pkv = parseProperty("", ((NSArray)rootDict).objectAtIndex(i));
|
||||
if (null != pkv) {
|
||||
plist.add(pkv);
|
||||
}
|
||||
}
|
||||
} else if (rootDict instanceof NSDictionary) {
|
||||
final String[] keys = ((NSDictionary)rootDict).allKeys();
|
||||
for (final String key : keys) {
|
||||
final PropKeyValue pkv = parseProperty(key, ((NSDictionary)rootDict).objectForKey(key));
|
||||
if (null != pkv) {
|
||||
plist.add(pkv);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -487,7 +487,8 @@ public class DataContentViewerArtifact extends javax.swing.JPanel implements Dat
|
||||
|| (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID())
|
||||
|| (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID())
|
||||
|| (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_OBJECT_DETECTED.getTypeID())
|
||||
|| (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID())) {
|
||||
|| (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID())
|
||||
|| (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID())) {
|
||||
return 3;
|
||||
} else {
|
||||
return 6;
|
||||
|
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2012-2015 Basis Technology Corp.
|
||||
* Copyright 2012-2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -18,14 +18,22 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.coreutils;
|
||||
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.StringTokenizer;
|
||||
|
||||
public class NetworkUtils {
|
||||
|
||||
private NetworkUtils() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the host name variable. Sometimes the network can be finicky, so the
|
||||
* answer returned by getHostName() could throw an exception or be null.
|
||||
* Have it read the environment variable if getHostName() is unsuccessful.
|
||||
*
|
||||
* @return the local host name
|
||||
*/
|
||||
public static String getLocalHostName() {
|
||||
String hostName = "";
|
||||
@ -41,4 +49,78 @@ public class NetworkUtils {
|
||||
}
|
||||
return hostName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to manually extract the domain from a URL.
|
||||
*
|
||||
* @param url
|
||||
* @return empty string if no domain could be found
|
||||
*/
|
||||
private static String getBaseDomain(String url) {
|
||||
String host = null;
|
||||
|
||||
//strip protocol
|
||||
String cleanUrl = url.replaceFirst(".*:\\/\\/", "");
|
||||
|
||||
//strip after slashes
|
||||
String dirToks[] = cleanUrl.split("\\/");
|
||||
if (dirToks.length > 0) {
|
||||
host = dirToks[0];
|
||||
} else {
|
||||
host = cleanUrl;
|
||||
}
|
||||
|
||||
//get the domain part from host (last 2)
|
||||
StringTokenizer tok = new StringTokenizer(host, ".");
|
||||
StringBuilder hostB = new StringBuilder();
|
||||
int toks = tok.countTokens();
|
||||
|
||||
for (int count = 0; count < toks; ++count) {
|
||||
String part = tok.nextToken();
|
||||
int diff = toks - count;
|
||||
if (diff < 3) {
|
||||
hostB.append(part);
|
||||
}
|
||||
if (diff == 2) {
|
||||
hostB.append(".");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
String base = hostB.toString();
|
||||
// verify there are no special characters in there
|
||||
if (base.matches(".*[~`!@#$%^&\\*\\(\\)\\+={}\\[\\];:\\?<>,/ ].*")) {
|
||||
return "";
|
||||
}
|
||||
return base;
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to extract the domain from a URL.
|
||||
* Will start by using the built-in URL class, and if that fails will
|
||||
* try to extract it manually.
|
||||
*
|
||||
* @param urlString The URL to extract the domain from
|
||||
* @return empty string if no domain name was found
|
||||
*/
|
||||
public static String extractDomain(String urlString) {
|
||||
if (urlString == null) {
|
||||
return "";
|
||||
}
|
||||
String result = "";
|
||||
|
||||
try {
|
||||
URL url = new URL(urlString);
|
||||
result = url.getHost();
|
||||
} catch (MalformedURLException ex) {
|
||||
//do not log if not a valid URL - we will try to extract it ourselves
|
||||
}
|
||||
|
||||
//was not a valid URL, try a less picky method
|
||||
if (result == null || result.trim().isEmpty()) {
|
||||
return getBaseDomain(urlString);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -21,6 +21,8 @@ package org.sleuthkit.autopsy.coreutils;
|
||||
import java.text.DateFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.GregorianCalendar;
|
||||
import java.util.List;
|
||||
import java.util.SimpleTimeZone;
|
||||
@ -45,7 +47,7 @@ public class TimeZoneUtils {
|
||||
java.util.TimeZone zone = java.util.TimeZone.getTimeZone(timeZoneId);
|
||||
int offset = zone.getRawOffset() / 1000;
|
||||
int hour = offset / 3600;
|
||||
int min = (offset % 3600) / 60;
|
||||
int min = Math.abs((offset % 3600) / 60);
|
||||
|
||||
DateFormat dfm = new SimpleDateFormat("z");
|
||||
dfm.setTimeZone(zone);
|
||||
@ -74,7 +76,7 @@ public class TimeZoneUtils {
|
||||
public static String createTimeZoneString(TimeZone timeZone) {
|
||||
int offset = timeZone.getRawOffset() / 1000;
|
||||
int hour = offset / 3600;
|
||||
int minutes = (offset % 3600) / 60;
|
||||
int minutes = Math.abs((offset % 3600) / 60);
|
||||
|
||||
return String.format("(GMT%+d:%02d) %s", hour, minutes, timeZone.getID()); //NON-NLS
|
||||
}
|
||||
@ -83,9 +85,11 @@ public class TimeZoneUtils {
|
||||
* Generates a list of time zones.
|
||||
*/
|
||||
public static List<String> createTimeZoneList() {
|
||||
List<String> timeZoneList = new ArrayList<>();
|
||||
/*
|
||||
* Create a list of time zones.
|
||||
*/
|
||||
List<TimeZone> timeZoneList = new ArrayList<>();
|
||||
|
||||
// load and add all timezone
|
||||
String[] ids = SimpleTimeZone.getAvailableIDs();
|
||||
for (String id : ids) {
|
||||
/*
|
||||
@ -97,10 +101,36 @@ public class TimeZoneUtils {
|
||||
* if(hasDaylight){ result = result + second; }
|
||||
* timeZoneComboBox.addItem(item + " (" + result + ")");
|
||||
*/
|
||||
timeZoneList.add(createTimeZoneString(TimeZone.getTimeZone(id)));
|
||||
timeZoneList.add(TimeZone.getTimeZone(id));
|
||||
}
|
||||
|
||||
return timeZoneList;
|
||||
/*
|
||||
* Sort the list of time zones first by offset, then by ID.
|
||||
*/
|
||||
Collections.sort(timeZoneList, new Comparator<TimeZone>(){
|
||||
@Override
|
||||
public int compare(TimeZone o1, TimeZone o2){
|
||||
int offsetDelta = Integer.compare(o1.getRawOffset(), o2.getRawOffset());
|
||||
|
||||
if (offsetDelta == 0) {
|
||||
return o1.getID().compareToIgnoreCase(o2.getID());
|
||||
}
|
||||
|
||||
return offsetDelta;
|
||||
}
|
||||
});
|
||||
|
||||
/*
|
||||
* Create a list of Strings encompassing both the GMT offset and the
|
||||
* time zone ID.
|
||||
*/
|
||||
List<String> outputList = new ArrayList<>();
|
||||
|
||||
for (TimeZone timeZone : timeZoneList) {
|
||||
outputList.add(createTimeZoneString(timeZone));
|
||||
}
|
||||
|
||||
return outputList;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -156,6 +156,8 @@ public class ExtractedContent implements AutopsyVisitableItem {
|
||||
return filePath + "drive_network.png"; //NON-NLS
|
||||
} else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED.getTypeID()) {
|
||||
return filePath + "face.png"; //NON-NLS
|
||||
} else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK.getTypeID()) {
|
||||
return filePath + "network-wifi.png"; //NON-NLS
|
||||
}
|
||||
return filePath + "artifact-icon.png"; //NON-NLS
|
||||
}
|
||||
|
BIN
Core/src/org/sleuthkit/autopsy/images/network-wifi.png
Normal file
BIN
Core/src/org/sleuthkit/autopsy/images/network-wifi.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 453 B |
@ -21,9 +21,6 @@ package org.sleuthkit.autopsy.modules.embeddedfileextractor;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
@ -164,18 +161,19 @@ class SevenZipExtractor {
|
||||
*
|
||||
* More heuristics to be added here
|
||||
*
|
||||
* @param archiveFile the AbstractFile for the parent archive which
|
||||
* which we are checking
|
||||
* @param inArchive The SevenZip archive currently open for extraction
|
||||
* @param archiveFile the AbstractFile for the parent archive which
|
||||
* which we are checking
|
||||
* @param inArchive The SevenZip archive currently open for
|
||||
* extraction
|
||||
*
|
||||
* @param inArchiveItemIndex Index of item inside the SevenZip archive. Each
|
||||
* file inside an archive is associated with a unique
|
||||
* integer
|
||||
* file inside an archive is associated with a
|
||||
* unique integer
|
||||
*
|
||||
* @param depthMap a concurrent hashmap which keeps track of the
|
||||
* depth of all nested archives, key of objectID
|
||||
* @param escapedFilePath the path to the archiveFileItem which has been
|
||||
* escaped
|
||||
* @param depthMap a concurrent hashmap which keeps track of the
|
||||
* depth of all nested archives, key of objectID
|
||||
* @param escapedFilePath the path to the archiveFileItem which has been
|
||||
* escaped
|
||||
*
|
||||
* @return true if potential zip bomb, false otherwise
|
||||
*/
|
||||
@ -605,7 +603,7 @@ class SevenZipExtractor {
|
||||
inArchiveItemIndex, PropID.SIZE);
|
||||
if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && archiveItemSize != null && archiveItemSize > 0) { //if free space is known and file is not empty.
|
||||
String archiveItemPath = (String) inArchive.getProperty(
|
||||
inArchiveItemIndex, PropID.PATH);
|
||||
inArchiveItemIndex, PropID.PATH);
|
||||
long newDiskSpace = freeDiskSpace - archiveItemSize;
|
||||
if (newDiskSpace < MIN_FREE_DISK_SPACE) {
|
||||
String msg = NbBundle.getMessage(SevenZipExtractor.class,
|
||||
@ -797,138 +795,55 @@ class SevenZipExtractor {
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream used to unpack the archive to local file
|
||||
* UnpackStream used by the SevenZipBindings to do archive extraction. A memory
|
||||
* leak exists in the SevenZip library that will not let go of the streams until
|
||||
* the entire archive extraction is complete. Instead of creating a new UnpackStream
|
||||
* for every file in the archive, instead we just rebase our EncodedFileOutputStream pointer
|
||||
* for every new file.
|
||||
*/
|
||||
private abstract static class UnpackStream implements ISequentialOutStream {
|
||||
private final static class UnpackStream implements ISequentialOutStream {
|
||||
|
||||
private OutputStream output;
|
||||
private EncodedFileOutputStream output;
|
||||
private String localAbsPath;
|
||||
private int bytesWritten;
|
||||
|
||||
UnpackStream(String localAbsPath) {
|
||||
UnpackStream(String localAbsPath) throws IOException {
|
||||
this.output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1);
|
||||
this.localAbsPath = localAbsPath;
|
||||
try {
|
||||
output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1);
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.SEVERE, "Error writing extracted file: " + localAbsPath, ex); //NON-NLS
|
||||
}
|
||||
|
||||
this.bytesWritten = 0;
|
||||
}
|
||||
|
||||
public abstract long getSize();
|
||||
|
||||
OutputStream getOutput() {
|
||||
return output;
|
||||
public void setNewOutputStream(String localAbsPath) throws IOException {
|
||||
this.output.close();
|
||||
this.output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1);
|
||||
this.localAbsPath = localAbsPath;
|
||||
this.bytesWritten = 0;
|
||||
}
|
||||
|
||||
String getLocalAbsPath() {
|
||||
return localAbsPath;
|
||||
}
|
||||
|
||||
public void close() {
|
||||
if (output != null) {
|
||||
try {
|
||||
output.flush();
|
||||
output.close();
|
||||
output = null;
|
||||
} catch (IOException e) {
|
||||
logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream used to unpack the archive of unknown size to local file
|
||||
*/
|
||||
private static class UnknownSizeUnpackStream extends UnpackStream {
|
||||
|
||||
private long freeDiskSpace;
|
||||
private boolean outOfSpace = false;
|
||||
private long bytesWritten = 0;
|
||||
|
||||
UnknownSizeUnpackStream(String localAbsPath, long freeDiskSpace) {
|
||||
super(localAbsPath);
|
||||
this.freeDiskSpace = freeDiskSpace;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSize() {
|
||||
return this.bytesWritten;
|
||||
public int getSize() {
|
||||
return bytesWritten;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int write(byte[] bytes) throws SevenZipException {
|
||||
try {
|
||||
// If the content size is unknown, cautiously write to disk.
|
||||
// Write only if byte array is less than 80% of the current
|
||||
// free disk space.
|
||||
if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || bytes.length < 0.8 * freeDiskSpace) {
|
||||
getOutput().write(bytes);
|
||||
// NOTE: this method is called multiple times for a
|
||||
// single extractSlow() call. Update bytesWritten and
|
||||
// freeDiskSpace after every write operation.
|
||||
this.bytesWritten += bytes.length;
|
||||
this.freeDiskSpace -= bytes.length;
|
||||
} else {
|
||||
this.outOfSpace = true;
|
||||
logger.log(Level.INFO, NbBundle.getMessage(
|
||||
SevenZipExtractor.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
|
||||
throw new SevenZipException(
|
||||
NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
|
||||
}
|
||||
output.write(bytes);
|
||||
this.bytesWritten += bytes.length;
|
||||
} catch (IOException ex) {
|
||||
throw new SevenZipException(
|
||||
NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
|
||||
getLocalAbsPath()), ex);
|
||||
NbBundle.getMessage(SevenZipExtractor.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
|
||||
localAbsPath), ex);
|
||||
}
|
||||
return bytes.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (getOutput() != null) {
|
||||
try {
|
||||
getOutput().flush();
|
||||
getOutput().close();
|
||||
if (this.outOfSpace) {
|
||||
Files.delete(Paths.get(getLocalAbsPath()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", getLocalAbsPath()); //NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream used to unpack the archive of known size to local file
|
||||
*/
|
||||
private static class KnownSizeUnpackStream extends UnpackStream {
|
||||
|
||||
private long size;
|
||||
|
||||
KnownSizeUnpackStream(String localAbsPath, long size) {
|
||||
super(localAbsPath);
|
||||
this.size = size;
|
||||
public void close() throws IOException {
|
||||
try(EncodedFileOutputStream out = output) {
|
||||
out.flush();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSize() {
|
||||
return this.size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int write(byte[] bytes) throws SevenZipException {
|
||||
try {
|
||||
getOutput().write(bytes);
|
||||
} catch (IOException ex) {
|
||||
throw new SevenZipException(
|
||||
NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
|
||||
getLocalAbsPath()), ex);
|
||||
}
|
||||
return bytes.length;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -975,7 +890,6 @@ class SevenZipExtractor {
|
||||
private final ProgressHandle progressHandle;
|
||||
|
||||
private int inArchiveItemIndex;
|
||||
private final long freeDiskSpace;
|
||||
|
||||
private long createTimeInSeconds;
|
||||
private long modTimeInSeconds;
|
||||
@ -992,7 +906,6 @@ class SevenZipExtractor {
|
||||
String password, long freeDiskSpace) {
|
||||
|
||||
this.inArchive = inArchive;
|
||||
this.freeDiskSpace = freeDiskSpace;
|
||||
this.progressHandle = progressHandle;
|
||||
this.archiveFile = archiveFile;
|
||||
this.archiveDetailsMap = archiveDetailsMap;
|
||||
@ -1000,15 +913,17 @@ class SevenZipExtractor {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get stream is called by the internal framework as it traverses
|
||||
* the archive structure. The ISequentialOutStream is where the
|
||||
* archive file contents will be expanded and written to the local disk.
|
||||
* Get stream is called by the internal framework as it traverses the
|
||||
* archive structure. The ISequentialOutStream is where the archive file
|
||||
* contents will be expanded and written to the local disk.
|
||||
*
|
||||
* Skips folders, as there is nothing to extract.
|
||||
*
|
||||
* @param inArchiveItemIndex current location of the
|
||||
* @param mode Will always be EXTRACT
|
||||
* @param mode Will always be EXTRACT
|
||||
*
|
||||
* @return
|
||||
*
|
||||
* @throws SevenZipException
|
||||
*/
|
||||
@Override
|
||||
@ -1023,17 +938,24 @@ class SevenZipExtractor {
|
||||
return null;
|
||||
}
|
||||
|
||||
final Long archiveItemSize = (Long) inArchive.getProperty(
|
||||
inArchiveItemIndex, PropID.SIZE);
|
||||
final String localAbsPath = archiveDetailsMap.get(
|
||||
inArchiveItemIndex).getLocalAbsPath();
|
||||
|
||||
if (archiveItemSize != null) {
|
||||
unpackStream = new SevenZipExtractor.KnownSizeUnpackStream(
|
||||
localAbsPath, archiveItemSize);
|
||||
} else {
|
||||
unpackStream = new SevenZipExtractor.UnknownSizeUnpackStream(
|
||||
localAbsPath, freeDiskSpace);
|
||||
//If the Unpackstream has been allocated, then set the Outputstream
|
||||
//to another file rather than creating a new unpack stream. The 7Zip
|
||||
//binding has a memory leak, so creating new unpack streams will not be
|
||||
//dereferenced. As a fix, we create one UnpackStream, and mutate its state,
|
||||
//so that there only exists one 8192 byte buffer in memory per archive.
|
||||
try {
|
||||
if (unpackStream != null) {
|
||||
unpackStream.setNewOutputStream(localAbsPath);
|
||||
} else {
|
||||
unpackStream = new UnpackStream(localAbsPath);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.WARNING, String.format("Error opening or setting new stream " //NON-NLS
|
||||
+ "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
|
||||
return null;
|
||||
}
|
||||
|
||||
return unpackStream;
|
||||
@ -1044,6 +966,7 @@ class SevenZipExtractor {
|
||||
* Called after getStream.
|
||||
*
|
||||
* @param mode Will always be EXTRACT.
|
||||
*
|
||||
* @throws SevenZipException
|
||||
*/
|
||||
@Override
|
||||
@ -1103,7 +1026,11 @@ class SevenZipExtractor {
|
||||
!(Boolean) inArchive.getProperty(inArchiveItemIndex, PropID.IS_FOLDER),
|
||||
0L, createTimeInSeconds, accessTimeInSeconds, modTimeInSeconds, localRelPath);
|
||||
|
||||
unpackStream.close();
|
||||
try {
|
||||
unpackStream.close();
|
||||
} catch (IOException e) {
|
||||
logger.log(Level.WARNING, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -1214,9 +1141,9 @@ class SevenZipExtractor {
|
||||
*/
|
||||
List<AbstractFile> getRootFileObjects() {
|
||||
List<AbstractFile> ret = new ArrayList<>();
|
||||
for (UnpackedNode child : rootNode.getChildren()) {
|
||||
rootNode.getChildren().forEach((child) -> {
|
||||
ret.add(child.getFile());
|
||||
}
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -1228,17 +1155,17 @@ class SevenZipExtractor {
|
||||
*/
|
||||
List<AbstractFile> getAllFileObjects() {
|
||||
List<AbstractFile> ret = new ArrayList<>();
|
||||
for (UnpackedNode child : rootNode.getChildren()) {
|
||||
rootNode.getChildren().forEach((child) -> {
|
||||
getAllFileObjectsRec(ret, child);
|
||||
}
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
|
||||
private void getAllFileObjectsRec(List<AbstractFile> list, UnpackedNode parent) {
|
||||
list.add(parent.getFile());
|
||||
for (UnpackedNode child : parent.getChildren()) {
|
||||
parent.getChildren().forEach((child) -> {
|
||||
getAllFileObjectsRec(list, child);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -91,7 +91,7 @@ HashDbImportDatabaseDialog.errorMessage.failedToOpenHashDbMsg=Failed to open has
|
||||
HashLookupModuleFactory.moduleName.text=Hash Lookup
|
||||
HashLookupModuleFactory.moduleDescription.text=Identifies known and notable files using supplied hash sets, such as a standard NSRL hash set.
|
||||
HashDbIngestModule.fileReadErrorMsg=Read Error\: {0}
|
||||
HashDbIngestModule.calcHashValueErr=Error encountered while calculating the hash value for {0}.
|
||||
HashDbIngestModule.calcHashValueErr=Error encountered while calculating the hash value for {0} ({1}).
|
||||
HashDbIngestModule.hashLookupErrorMsg=Hash Lookup Error\: {0}
|
||||
HashDbIngestModule.settingKnownBadStateErr=Error encountered while setting notable state for {0}.
|
||||
HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}.
|
||||
|
@ -229,7 +229,9 @@ public class HashDbIngestModule implements FileIngestModule {
|
||||
services.postMessage(IngestMessage.createErrorMessage(
|
||||
HashLookupModuleFactory.getModuleName(),
|
||||
NbBundle.getMessage(this.getClass(), "HashDbIngestModule.fileReadErrorMsg", name),
|
||||
NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr", name)));
|
||||
NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr",
|
||||
file.getParentPath() + file.getName(),
|
||||
file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)?"Allocated File" : "Deleted File")));
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
}
|
||||
|
@ -471,7 +471,7 @@ final class FilesSetRulePanel extends javax.swing.JPanel {
|
||||
if (!this.nameTextField.getText().isEmpty()) {
|
||||
if (this.nameRegexCheckbox.isSelected()) {
|
||||
try {
|
||||
Pattern pattern = Pattern.compile(this.nameTextField.getText());
|
||||
Pattern pattern = Pattern.compile(this.nameTextField.getText(), Pattern.CASE_INSENSITIVE);
|
||||
if (this.fullNameRadioButton.isSelected()) {
|
||||
condition = new FilesSet.Rule.FullNameCondition(pattern);
|
||||
} else {
|
||||
@ -556,7 +556,7 @@ final class FilesSetRulePanel extends javax.swing.JPanel {
|
||||
if (!this.pathTextField.getText().isEmpty()) {
|
||||
if (this.pathRegexCheckBox.isSelected()) {
|
||||
try {
|
||||
condition = new FilesSet.Rule.ParentPathCondition(Pattern.compile(this.pathTextField.getText()));
|
||||
condition = new FilesSet.Rule.ParentPathCondition(Pattern.compile(this.pathTextField.getText(), Pattern.CASE_INSENSITIVE));
|
||||
} catch (PatternSyntaxException ex) {
|
||||
logger.log(Level.SEVERE, "Attempt to get malformed path condition", ex); // NON-NLS
|
||||
throw new IllegalStateException("The files set rule panel path condition is not in a valid state"); // NON-NLS
|
||||
|
@ -271,6 +271,9 @@ class ReportHTML implements TableReportModule {
|
||||
case TSK_ACCOUNT:
|
||||
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/accounts.png"); //NON-NLS
|
||||
break;
|
||||
case TSK_WIFI_NETWORK:
|
||||
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/network-wifi.png"); //NON-NLS
|
||||
break;
|
||||
default:
|
||||
logger.log(Level.WARNING, "useDataTypeIcon: unhandled artifact type = {0}", dataType); //NON-NLS
|
||||
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/star.png"); //NON-NLS
|
||||
|
@ -41,6 +41,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
|
||||
import org.sleuthkit.autopsy.coreutils.TimeStampUtils;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.DataSource;
|
||||
|
||||
/*
|
||||
* A runnable that adds an archive data source as well as data sources contained
|
||||
@ -195,10 +196,19 @@ class AddArchiveTask implements Runnable {
|
||||
continue;
|
||||
}
|
||||
|
||||
// if we are here it means the data source was addedd successfully
|
||||
// if we are here it means the data source was added successfully
|
||||
success = true;
|
||||
newDataSources.addAll(internalDataSource.getContent());
|
||||
|
||||
// Update the names for all new data sources to be the root archive plus the name of the data source
|
||||
for (Content c:internalDataSource.getContent()) {
|
||||
if (c instanceof DataSource) {
|
||||
DataSource ds = (DataSource) c;
|
||||
String newName = Paths.get(archivePath).getFileName() + "/" + ds.getName();
|
||||
ds.setDisplayName(newName);
|
||||
}
|
||||
}
|
||||
|
||||
// skip all other DSPs for this data source
|
||||
break;
|
||||
}
|
||||
|
@ -45,6 +45,7 @@ import org.sleuthkit.autopsy.coordinationservice.CoordinationService;
|
||||
import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
|
||||
import org.sleuthkit.autopsy.coreutils.StopWatch;
|
||||
import org.sleuthkit.autopsy.events.AutopsyEventException;
|
||||
import org.sleuthkit.autopsy.events.AutopsyEventPublisher;
|
||||
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus;
|
||||
@ -666,43 +667,73 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen
|
||||
* @return A result code indicating success, partial success, or failure.
|
||||
*/
|
||||
CaseDeletionResult deleteCase(AutoIngestJob job) {
|
||||
String caseName = job.getManifest().getCaseName();
|
||||
Path caseDirectoryPath = job.getCaseDirectoryPath();
|
||||
Path metadataFilePath = caseDirectoryPath.resolve(caseName + CaseMetadata.getFileExtension());
|
||||
StopWatch stopWatch = new StopWatch();
|
||||
stopWatch.start();
|
||||
synchronized (jobsLock) {
|
||||
String caseName = job.getManifest().getCaseName();
|
||||
Path metadataFilePath = job.getCaseDirectoryPath().resolve(caseName + CaseMetadata.getFileExtension());
|
||||
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to acquire jobsLock (Java monitor in AutoIngestMonitor class) for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
try {
|
||||
CaseMetadata metadata = new CaseMetadata(metadataFilePath);
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to read case metadata for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
Case.deleteCase(metadata);
|
||||
|
||||
} catch (CaseMetadata.CaseMetadataException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Failed to get case metadata file %s for case %s at %s", metadataFilePath.toString(), caseName, job.getCaseDirectoryPath().toString()), ex);
|
||||
LOGGER.log(Level.SEVERE, String.format("Failed to read case metadata file %s for case %s at %s", metadataFilePath, caseName, caseDirectoryPath), ex);
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to fail to read case metadata file %s for case %s at %s", stopWatch.getElapsedTimeSecs(), metadataFilePath, caseName, caseDirectoryPath));
|
||||
return CaseDeletionResult.FAILED;
|
||||
} catch (CaseActionException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Failed to physically delete case %s at %s", caseName, job.getCaseDirectoryPath().toString()), ex);
|
||||
LOGGER.log(Level.SEVERE, String.format("Failed to delete case %s at %s", caseName, caseDirectoryPath), ex);
|
||||
return CaseDeletionResult.FAILED;
|
||||
}
|
||||
|
||||
// Update the state of completed jobs associated with this case to indicate
|
||||
// that the case has been deleted
|
||||
for (AutoIngestJob completedJob : getCompletedJobs()) {
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
List<AutoIngestJob> completedJobs = getCompletedJobs();
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to get completed jobs listing for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
for (AutoIngestJob completedJob : completedJobs) {
|
||||
if (caseName.equals(completedJob.getManifest().getCaseName())) {
|
||||
try {
|
||||
completedJob.setProcessingStatus(DELETED);
|
||||
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(completedJob);
|
||||
coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, completedJob.getManifest().getFilePath().toString(), nodeData.toArray());
|
||||
} catch (CoordinationServiceException | InterruptedException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Failed to update completed job node data for %s when deleting case %s", completedJob.getManifest().getFilePath().toString(), caseName), ex);
|
||||
LOGGER.log(Level.SEVERE, String.format("Failed to update completed job node data for %s when deleting case %s at %s", completedJob.getManifest().getFilePath(), caseName, caseDirectoryPath), ex);
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to fail to update job node data for completed jobs for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
|
||||
return CaseDeletionResult.PARTIALLY_DELETED;
|
||||
}
|
||||
}
|
||||
}
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to update job node data for completed jobs for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
|
||||
|
||||
// Remove jobs associated with this case from the completed jobs collection.
|
||||
jobsSnapshot.completedJobs.removeIf((AutoIngestJob completedJob)
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
completedJobs.removeIf((AutoIngestJob completedJob)
|
||||
-> completedJob.getManifest().getCaseName().equals(caseName));
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to remove completed jobs for case %s at %s from current jobs snapshot", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
|
||||
|
||||
// Publish a message to update auto ingest nodes.
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
eventPublisher.publishRemotely(new AutoIngestCaseDeletedEvent(caseName, LOCAL_HOST_NAME, AutoIngestManager.getSystemUserNameProperty()));
|
||||
stopWatch.stop();
|
||||
LOGGER.log(Level.INFO, String.format("Used %d s to publish job deletion event for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName,caseDirectoryPath));
|
||||
}
|
||||
|
||||
return CaseDeletionResult.FULLY_DELETED;
|
||||
|
@ -254,6 +254,7 @@ public final class ImageGalleryController {
|
||||
|
||||
/**
|
||||
* Should the "forward" button on the history be enabled?
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ReadOnlyBooleanProperty getCanAdvance() {
|
||||
@ -262,6 +263,7 @@ public final class ImageGalleryController {
|
||||
|
||||
/**
|
||||
* Should the "Back" button on the history be enabled?
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ReadOnlyBooleanProperty getCanRetreat() {
|
||||
@ -269,10 +271,9 @@ public final class ImageGalleryController {
|
||||
}
|
||||
|
||||
/**
|
||||
* Display the passed in group. Causes this group to
|
||||
* get recorded in the history queue and observers of the
|
||||
* current state will be notified and update their panels/widgets
|
||||
* appropriately.
|
||||
* Display the passed in group. Causes this group to get recorded in the
|
||||
* history queue and observers of the current state will be notified and
|
||||
* update their panels/widgets appropriately.
|
||||
*
|
||||
* @param newState
|
||||
*/
|
||||
@ -283,6 +284,7 @@ public final class ImageGalleryController {
|
||||
|
||||
/**
|
||||
* Display the next group in the "forward" history stack
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public GroupViewState advance() {
|
||||
@ -291,6 +293,7 @@ public final class ImageGalleryController {
|
||||
|
||||
/**
|
||||
* Display the previous group in the "back" history stack
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public GroupViewState retreat() {
|
||||
@ -500,10 +503,6 @@ public final class ImageGalleryController {
|
||||
return drawableDB.getFileFromID(fileID);
|
||||
}
|
||||
|
||||
public ReadOnlyDoubleProperty regroupProgress() {
|
||||
return groupManager.regroupProgress();
|
||||
}
|
||||
|
||||
public HashSetManager getHashSetManager() {
|
||||
return hashSetManager;
|
||||
}
|
||||
@ -764,8 +763,9 @@ public final class ImageGalleryController {
|
||||
//do in transaction
|
||||
drawableDbTransaction = taskDB.beginTransaction();
|
||||
|
||||
/* We are going to periodically commit the CaseDB transaction and sleep so
|
||||
* that the user can have Autopsy do other stuff while these bulk tasks are ongoing.
|
||||
/* We are going to periodically commit the CaseDB transaction
|
||||
* and sleep so that the user can have Autopsy do other stuff
|
||||
* while these bulk tasks are ongoing.
|
||||
*/
|
||||
int caseDbCounter = 0;
|
||||
for (final AbstractFile f : files) {
|
||||
@ -812,7 +812,7 @@ public final class ImageGalleryController {
|
||||
taskDB.commitTransaction(drawableDbTransaction, true);
|
||||
drawableDbTransaction = null;
|
||||
|
||||
} catch (TskCoreException | InterruptedException ex) {
|
||||
} catch (TskCoreException | InterruptedException ex) {
|
||||
progressHandle.progress(Bundle.BulkTask_stopCopy_status());
|
||||
logger.log(Level.WARNING, "Stopping copy to drawable db task. Failed to transfer all database contents", ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.warn(Bundle.BulkTask_errPopulating_errMsg(), ex.getMessage());
|
||||
|
@ -211,7 +211,7 @@ public class ImageGalleryModule {
|
||||
}
|
||||
}
|
||||
else if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) == DATA_ADDED) {
|
||||
ModuleDataEvent mde = (ModuleDataEvent)evt.getOldValue();
|
||||
ModuleDataEvent mde = (ModuleDataEvent) evt.getOldValue();
|
||||
|
||||
if (mde.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID()) {
|
||||
DrawableDB drawableDB = controller.getDatabase();
|
||||
|
@ -68,13 +68,6 @@ public class DrawableGroup implements Comparable<DrawableGroup> {
|
||||
DrawableGroup(GroupKey<?> groupKey, Set<Long> filesInGroup, boolean seen) {
|
||||
this.groupKey = groupKey;
|
||||
this.fileIDs.setAll(filesInGroup);
|
||||
fileIDs.addListener((ListChangeListener.Change<? extends Long> listchange) -> {
|
||||
boolean seenChanged = false;
|
||||
while (false == seenChanged && listchange.next()) {
|
||||
seenChanged |= listchange.wasAdded();
|
||||
}
|
||||
invalidateProperties(seenChanged);
|
||||
});
|
||||
this.seen.set(seen);
|
||||
}
|
||||
|
||||
@ -183,15 +176,21 @@ public class DrawableGroup implements Comparable<DrawableGroup> {
|
||||
if (fileIDs.contains(f) == false) {
|
||||
fileIDs.add(f);
|
||||
}
|
||||
// invalidate no matter what because the file could have new hash hits, etc.
|
||||
invalidateProperties(true);
|
||||
}
|
||||
|
||||
synchronized void setFiles(Set<? extends Long> newFileIds) {
|
||||
fileIDs.removeIf(fileID -> newFileIds.contains(fileID) == false);
|
||||
invalidateProperties(false);
|
||||
newFileIds.stream().forEach(this::addFile);
|
||||
}
|
||||
|
||||
synchronized void removeFile(Long f) {
|
||||
fileIDs.removeAll(f);
|
||||
if (fileIDs.contains(f)) {
|
||||
fileIDs.removeAll(f);
|
||||
invalidateProperties(false);
|
||||
}
|
||||
}
|
||||
|
||||
private void invalidateProperties(boolean seenChanged) {
|
||||
|
@ -50,6 +50,7 @@ import javafx.beans.property.ReadOnlyBooleanWrapper;
|
||||
import javafx.beans.property.ReadOnlyDoubleProperty;
|
||||
import javafx.beans.property.ReadOnlyObjectProperty;
|
||||
import javafx.beans.property.ReadOnlyObjectWrapper;
|
||||
import javafx.beans.property.ReadOnlyStringProperty;
|
||||
import javafx.collections.FXCollections;
|
||||
import javafx.collections.ObservableList;
|
||||
import javafx.concurrent.Service;
|
||||
@ -278,8 +279,8 @@ public class GroupManager {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update unseenGroups list accordingly based on the current status
|
||||
* of 'group'. Removes it if it is seen or adds it if it is unseen.
|
||||
* Update unseenGroups list accordingly based on the current status of
|
||||
* 'group'. Removes it if it is seen or adds it if it is unseen.
|
||||
*
|
||||
* @param group
|
||||
*/
|
||||
@ -505,6 +506,10 @@ public class GroupManager {
|
||||
return regrouper.progressProperty();
|
||||
}
|
||||
|
||||
public ReadOnlyStringProperty regroupMessage() {
|
||||
return regrouper.messageProperty();
|
||||
}
|
||||
|
||||
@Subscribe
|
||||
synchronized public void handleTagAdded(ContentTagAddedEvent evt) {
|
||||
GroupKey<?> newGroupKey = null;
|
||||
@ -730,12 +735,7 @@ public class GroupManager {
|
||||
*/
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
@NbBundle.Messages({"# {0} - groupBy attribute Name",
|
||||
"# {1} - sortBy name",
|
||||
"# {2} - sort Order",
|
||||
"ReGroupTask.displayTitle=regrouping files by {0} sorted by {1} in {2} order",
|
||||
"# {0} - groupBy attribute Name",
|
||||
"# {1} - atribute value",
|
||||
"ReGroupTask.progressUpdate=regrouping files by {0} : {1}"})
|
||||
"ReGroupTask.displayTitle=regrouping by {0}: " })
|
||||
class ReGroupTask<AttrValType extends Comparable<AttrValType>> extends LoggedTask<Void> {
|
||||
|
||||
private final DataSource dataSource;
|
||||
@ -743,16 +743,14 @@ public class GroupManager {
|
||||
private final GroupSortBy sortBy;
|
||||
private final SortOrder sortOrder;
|
||||
|
||||
private final ProgressHandle groupProgress;
|
||||
|
||||
ReGroupTask(DataSource dataSource, DrawableAttribute<AttrValType> groupBy, GroupSortBy sortBy, SortOrder sortOrder) {
|
||||
super(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString(), sortBy.getDisplayName(), sortOrder.toString()), true);
|
||||
super(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString() ), true);
|
||||
this.dataSource = dataSource;
|
||||
this.groupBy = groupBy;
|
||||
this.sortBy = sortBy;
|
||||
this.sortOrder = sortOrder;
|
||||
|
||||
groupProgress = ProgressHandle.createHandle(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString(), sortBy.getDisplayName(), sortOrder.toString()), this);
|
||||
updateTitle(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString() ));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -761,7 +759,8 @@ public class GroupManager {
|
||||
if (isCancelled()) {
|
||||
return null;
|
||||
}
|
||||
groupProgress.start();
|
||||
|
||||
updateProgress(-1, 1);
|
||||
|
||||
analyzedGroups.clear();
|
||||
unSeenGroups.clear();
|
||||
@ -769,7 +768,7 @@ public class GroupManager {
|
||||
// Get the list of group keys
|
||||
Multimap<DataSource, AttrValType> valsByDataSource = findValuesForAttribute();
|
||||
|
||||
groupProgress.switchToDeterminate(valsByDataSource.entries().size());
|
||||
updateProgress(0, valsByDataSource.entries().size());
|
||||
int p = 0;
|
||||
// For each key value, partially create the group and add it to the list.
|
||||
for (final Map.Entry<DataSource, AttrValType> valForDataSource : valsByDataSource.entries()) {
|
||||
@ -777,9 +776,8 @@ public class GroupManager {
|
||||
return null;
|
||||
}
|
||||
p++;
|
||||
updateMessage(Bundle.ReGroupTask_progressUpdate(groupBy.attrName.toString(), valForDataSource.getValue()));
|
||||
updateMessage(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString()) + valForDataSource.getValue());
|
||||
updateProgress(p, valsByDataSource.size());
|
||||
groupProgress.progress(Bundle.ReGroupTask_progressUpdate(groupBy.attrName.toString(), valForDataSource), p);
|
||||
popuplateIfAnalyzed(new GroupKey<>(groupBy, valForDataSource.getValue(), valForDataSource.getKey()), this);
|
||||
}
|
||||
|
||||
@ -808,8 +806,8 @@ public class GroupManager {
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
groupProgress.finish();
|
||||
updateProgress(1, 1);
|
||||
updateMessage("");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@ -827,12 +825,9 @@ public class GroupManager {
|
||||
}
|
||||
|
||||
/**
|
||||
* find the distinct values for the given column (DrawableAttribute)
|
||||
*
|
||||
* Find the distinct values for the given column (DrawableAttribute).
|
||||
* These values represent the groups of files.
|
||||
*
|
||||
* @param groupBy
|
||||
*
|
||||
* @return map of data source (or null if group by attribute ignores
|
||||
* data sources) to list of unique group values
|
||||
*/
|
||||
|
@ -10,12 +10,21 @@
|
||||
<?import javafx.scene.layout.HBox?>
|
||||
<?import javafx.scene.layout.StackPane?>
|
||||
|
||||
<fx:root id="AnchorPane" maxHeight="-Infinity" maxWidth="1.7976931348623157E308" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" type="javafx.scene.layout.AnchorPane" xmlns="http://javafx.com/javafx/8.0.65" xmlns:fx="http://javafx.com/fxml/1">
|
||||
<fx:root id="AnchorPane" maxHeight="-Infinity" maxWidth="1.7976931348623157E308" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" type="javafx.scene.layout.AnchorPane" xmlns="http://javafx.com/javafx/8.0.141" xmlns:fx="http://javafx.com/fxml/1">
|
||||
<children>
|
||||
<BorderPane minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" AnchorPane.bottomAnchor="0.0" AnchorPane.leftAnchor="0.0" AnchorPane.rightAnchor="0.0" AnchorPane.topAnchor="0.0">
|
||||
<right>
|
||||
<HBox alignment="CENTER_RIGHT" prefHeight="-1.0" prefWidth="-1.0" spacing="5.0" BorderPane.alignment="CENTER_RIGHT">
|
||||
<children>
|
||||
<Label fx:id="staleLabel" text="Some data may be out of date. Enable listening to ingest to update.">
|
||||
<graphic>
|
||||
<ImageView fitHeight="16.0" fitWidth="16.0" pickOnBounds="true" preserveRatio="true">
|
||||
<image>
|
||||
<Image url="@../images/information.png" />
|
||||
</image>
|
||||
</ImageView>
|
||||
</graphic>
|
||||
</Label>
|
||||
<StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" HBox.hgrow="NEVER">
|
||||
<children>
|
||||
<ProgressBar id="progBar" fx:id="fileTaskProgresBar" focusTraversable="false" maxHeight="-1.0" maxWidth="1.7976931348623157E308" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="-1.0" progress="0.0" visible="true" />
|
||||
@ -31,37 +40,27 @@
|
||||
<Insets />
|
||||
</HBox.margin>
|
||||
</StackPane>
|
||||
<StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" HBox.hgrow="NEVER">
|
||||
<children>
|
||||
<ProgressBar fx:id="bgTaskProgressBar" maxHeight="-1.0" maxWidth="-1.0" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="-1.0" progress="0.0" StackPane.alignment="CENTER" />
|
||||
<Label fx:id="bgTaskLabel" alignment="CENTER" cache="false" contentDisplay="CENTER" disable="false" focusTraversable="false" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" text="" StackPane.alignment="CENTER">
|
||||
<StackPane.margin>
|
||||
<Insets left="3.0" right="3.0" />
|
||||
</StackPane.margin>
|
||||
<padding>
|
||||
<Insets bottom="3.0" left="3.0" right="3.0" top="3.0" />
|
||||
</padding></Label>
|
||||
</children>
|
||||
<HBox.margin>
|
||||
<Insets right="5.0" />
|
||||
</HBox.margin>
|
||||
</StackPane>
|
||||
</children>
|
||||
<BorderPane.margin>
|
||||
<Insets left="10.0" />
|
||||
</BorderPane.margin>
|
||||
</HBox>
|
||||
</right>
|
||||
<left><Label fx:id="staleLabel" text="Some data may be out of date. Enable listening to ingest to update." BorderPane.alignment="CENTER">
|
||||
<graphic><ImageView fitHeight="16.0" fitWidth="16.0" pickOnBounds="true" preserveRatio="true">
|
||||
<image>
|
||||
<Image url="@../images/information.png" />
|
||||
</image></ImageView>
|
||||
</graphic>
|
||||
<BorderPane.margin>
|
||||
<Insets bottom="5.0" left="5.0" right="10.0" top="5.0" />
|
||||
</BorderPane.margin></Label>
|
||||
</left>
|
||||
<left>
|
||||
<StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" BorderPane.alignment="CENTER">
|
||||
<children>
|
||||
<ProgressBar fx:id="regroupProgressBar" maxHeight="-1.0" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="500.0" progress="0.0" StackPane.alignment="CENTER_LEFT" />
|
||||
<Label fx:id="regroupLabel" cache="false" contentDisplay="CENTER" disable="false" focusTraversable="false" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefWidth="500.0" text="" textOverrun="CENTER_ELLIPSIS" StackPane.alignment="CENTER_LEFT">
|
||||
<StackPane.margin>
|
||||
<Insets left="3.0" right="3.0" />
|
||||
</StackPane.margin>
|
||||
<padding>
|
||||
<Insets bottom="3.0" left="3.0" right="3.0" top="3.0" />
|
||||
</padding>
|
||||
</Label>
|
||||
</children>
|
||||
</StackPane>
|
||||
</left>
|
||||
</BorderPane>
|
||||
</children>
|
||||
</fx:root>
|
||||
|
@ -28,28 +28,26 @@ import javafx.scene.control.Tooltip;
|
||||
import javafx.scene.layout.AnchorPane;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
|
||||
import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupManager;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class StatusBar extends AnchorPane {
|
||||
|
||||
private final ImageGalleryController controller;
|
||||
|
||||
@FXML
|
||||
private ProgressBar fileTaskProgresBar;
|
||||
|
||||
@FXML
|
||||
private Label fileUpdateTaskLabel;
|
||||
|
||||
@FXML
|
||||
private Label bgTaskLabel;
|
||||
|
||||
private Label regroupLabel;
|
||||
@FXML
|
||||
private Label staleLabel;
|
||||
|
||||
@FXML
|
||||
private ProgressBar bgTaskProgressBar;
|
||||
private ProgressBar regroupProgressBar;
|
||||
|
||||
private final ImageGalleryController controller;
|
||||
private final GroupManager groupManager;
|
||||
|
||||
@FXML
|
||||
@NbBundle.Messages({"StatusBar.fileUpdateTaskLabel.text= File Update Tasks",
|
||||
@ -58,23 +56,25 @@ public class StatusBar extends AnchorPane {
|
||||
void initialize() {
|
||||
assert fileTaskProgresBar != null : "fx:id=\"fileTaskProgresBar\" was not injected: check your FXML file 'StatusBar.fxml'.";
|
||||
assert fileUpdateTaskLabel != null : "fx:id=\"fileUpdateTaskLabel\" was not injected: check your FXML file 'StatusBar.fxml'.";
|
||||
assert bgTaskLabel != null : "fx:id=\"bgTaskLabel\" was not injected: check your FXML file 'StatusBar.fxml'.";
|
||||
assert bgTaskProgressBar != null : "fx:id=\"bgTaskProgressBar\" was not injected: check your FXML file 'StatusBar.fxml'.";
|
||||
assert regroupLabel != null : "fx:id=\"regroupLabel\" was not injected: check your FXML file 'StatusBar.fxml'.";
|
||||
assert regroupProgressBar != null : "fx:id=\"regroupProgressBar\" was not injected: check your FXML file 'StatusBar.fxml'.";
|
||||
|
||||
fileUpdateTaskLabel.textProperty().bind(controller.getDBTasksQueueSizeProperty().asString().concat(Bundle.StatusBar_fileUpdateTaskLabel_text()));
|
||||
fileTaskProgresBar.progressProperty().bind(controller.getDBTasksQueueSizeProperty().negate());
|
||||
|
||||
controller.regroupProgress().addListener((ov, oldSize, newSize) -> {
|
||||
groupManager.regroupProgress().addListener((ov, oldSize, newSize) -> {
|
||||
Platform.runLater(() -> {
|
||||
if (controller.regroupProgress().lessThan(1.0).get()) {
|
||||
if (groupManager.regroupProgress().lessThan(1.0).get()) {
|
||||
// Regrouping in progress
|
||||
bgTaskProgressBar.progressProperty().setValue(-1.0);
|
||||
bgTaskLabel.setText(Bundle.StatusBar_bgTaskLabel_text());
|
||||
regroupProgressBar.progressProperty().setValue(groupManager.regroupProgress().doubleValue());
|
||||
regroupLabel.setText(groupManager.regroupMessage().get());
|
||||
|
||||
} else {
|
||||
// Clear the progress bar
|
||||
bgTaskProgressBar.progressProperty().setValue(0.0);
|
||||
bgTaskLabel.setText("");
|
||||
regroupProgressBar.progressProperty().setValue(0.0);
|
||||
regroupLabel.setText("");
|
||||
}
|
||||
regroupLabel.setTooltip(new Tooltip(regroupLabel.getText()));
|
||||
});
|
||||
});
|
||||
|
||||
@ -84,6 +84,7 @@ public class StatusBar extends AnchorPane {
|
||||
|
||||
public StatusBar(ImageGalleryController controller) {
|
||||
this.controller = controller;
|
||||
this.groupManager = controller.getGroupManager();
|
||||
FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("StatusBar.fxml")); //NON-NLS
|
||||
fxmlLoader.setRoot(this);
|
||||
fxmlLoader.setController(this);
|
||||
@ -93,6 +94,5 @@ public class StatusBar extends AnchorPane {
|
||||
} catch (IOException exception) {
|
||||
throw new RuntimeException(exception);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
8
NEWS.txt
8
NEWS.txt
@ -1,8 +1,8 @@
|
||||
---------------- VERSION 4.9.0 --------------
|
||||
---------------- VERSION 4.9.1 --------------
|
||||
Bug Fixes:
|
||||
- Fixed possible ingest deadlock from Image Gallery database inserts
|
||||
- Image Gallery does not need lock on Case DB during pre-population, which makes UI more responsive
|
||||
- Other misc Image Gallery fixes
|
||||
- Fixed possible ingest deadlock from Image Gallery database inserts.
|
||||
- Image Gallery does not need lock on Case DB during pre-population, which makes UI more responsive.
|
||||
- Other misc Image Gallery fixes.
|
||||
|
||||
---------------- VERSION 4.9.0 --------------
|
||||
|
||||
|
@ -64,9 +64,7 @@
|
||||
</run-dependency>
|
||||
</dependency>
|
||||
</module-dependencies>
|
||||
<public-packages>
|
||||
<package>org.sleuthkit.autopsy.recentactivity</package>
|
||||
</public-packages>
|
||||
<public-packages/>
|
||||
<class-path-extension>
|
||||
<runtime-relative-path>ext/gson-2.1.jar</runtime-relative-path>
|
||||
<binary-origin>release/modules/ext/gson-2.1.jar</binary-origin>
|
||||
|
@ -39,6 +39,7 @@ import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.FileManager;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
@ -163,7 +164,7 @@ class Chrome extends Extract {
|
||||
NbBundle.getMessage(this.getClass(), "Chrome.moduleName")));
|
||||
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN,
|
||||
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
|
||||
(Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
|
||||
(NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
|
||||
|
||||
BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes);
|
||||
if (bbart != null) {
|
||||
@ -286,7 +287,7 @@ class Chrome extends Extract {
|
||||
} else {
|
||||
date = Long.valueOf(0);
|
||||
}
|
||||
String domain = Util.extractDomain(url);
|
||||
String domain = NetworkUtils.extractDomain(url);
|
||||
try {
|
||||
BlackboardArtifact bbart = bookmarkFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK);
|
||||
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
|
||||
@ -496,7 +497,7 @@ class Chrome extends Extract {
|
||||
//bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", time));
|
||||
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
|
||||
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), time));
|
||||
String domain = Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : ""); //NON-NLS
|
||||
String domain = NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : ""); //NON-NLS
|
||||
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN,
|
||||
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), domain));
|
||||
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME,
|
||||
@ -590,7 +591,7 @@ class Chrome extends Extract {
|
||||
NbBundle.getMessage(this.getClass(), "Chrome.moduleName")));
|
||||
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED,
|
||||
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
|
||||
(Util.extractDomain((result.get("origin_url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
|
||||
(NetworkUtils.extractDomain((result.get("origin_url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
|
||||
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME,
|
||||
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
|
||||
((result.get("username_value").toString() != null) ? result.get("username_value").toString().replaceAll("'", "''") : ""))); //NON-NLS
|
||||
|
@ -26,6 +26,7 @@ import java.io.BufferedReader;
|
||||
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.coreutils.ExecUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
@ -609,6 +610,6 @@ class ExtractIE extends Extract {
|
||||
return null;
|
||||
}
|
||||
|
||||
return Util.extractDomain(url);
|
||||
return NetworkUtils.extractDomain(url);
|
||||
}
|
||||
}
|
||||
|
@ -374,7 +374,9 @@ class ExtractRegistry extends Extract {
|
||||
// Add all "usb" dataType nodes to collection of BlackboardArtifacts
|
||||
// that we will submit in a ModuleDataEvent for additional processing.
|
||||
Collection<BlackboardArtifact> usbBBartifacts = new ArrayList<>();
|
||||
|
||||
// Add all "ssid" dataType nodes to collection of BlackboardArtifacts
|
||||
// that we will submit in a ModuleDataEvent for additional processing.
|
||||
Collection<BlackboardArtifact> wifiBBartifacts = new ArrayList<>();
|
||||
for (int i = 0; i < len; i++) {
|
||||
Element tempnode = (Element) children.item(i);
|
||||
|
||||
@ -734,6 +736,7 @@ class ExtractRegistry extends Extract {
|
||||
bbart.addAttributes(bbattributes);
|
||||
// index the artifact for keyword search
|
||||
this.indexArtifact(bbart);
|
||||
wifiBBartifacts.add(bbart);
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error adding SSID artifact to blackboard."); //NON-NLS
|
||||
}
|
||||
@ -756,6 +759,9 @@ class ExtractRegistry extends Extract {
|
||||
if (!usbBBartifacts.isEmpty()) {
|
||||
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_ATTACHED, usbBBartifacts));
|
||||
}
|
||||
if (!wifiBBartifacts.isEmpty()){
|
||||
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK, wifiBBartifacts));
|
||||
}
|
||||
return true;
|
||||
} catch (FileNotFoundException ex) {
|
||||
logger.log(Level.SEVERE, "Error finding the registry file."); //NON-NLS
|
||||
|
@ -35,6 +35,7 @@ import java.util.logging.Level;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.services.FileManager;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
|
||||
import org.sleuthkit.autopsy.datamodel.ContentUtils;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
@ -669,6 +670,6 @@ class Firefox extends Extract {
|
||||
return null;
|
||||
}
|
||||
|
||||
return Util.extractDomain(url);
|
||||
return NetworkUtils.extractDomain(url);
|
||||
}
|
||||
}
|
||||
|
@ -84,83 +84,6 @@ class Util {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param url
|
||||
* @return empty string if no domain could be found
|
||||
*/
|
||||
private static String getBaseDomain(String url) {
|
||||
String host = null;
|
||||
|
||||
//strip protocol
|
||||
String cleanUrl = url.replaceFirst(".*:\\/\\/", "");
|
||||
|
||||
//strip after slashes
|
||||
String dirToks[] = cleanUrl.split("\\/");
|
||||
if (dirToks.length > 0) {
|
||||
host = dirToks[0];
|
||||
} else {
|
||||
host = cleanUrl;
|
||||
}
|
||||
|
||||
//get the domain part from host (last 2)
|
||||
StringTokenizer tok = new StringTokenizer(host, ".");
|
||||
StringBuilder hostB = new StringBuilder();
|
||||
int toks = tok.countTokens();
|
||||
|
||||
for (int count = 0; count < toks; ++count) {
|
||||
String part = tok.nextToken();
|
||||
int diff = toks - count;
|
||||
if (diff < 3) {
|
||||
hostB.append(part);
|
||||
}
|
||||
if (diff == 2) {
|
||||
hostB.append(".");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
String base = hostB.toString();
|
||||
// verify there are no special characters in there
|
||||
if (base.matches(".*[~`!@#$%^&\\*\\(\\)\\+={}\\[\\];:\\?<>,/ ].*")) {
|
||||
return "";
|
||||
}
|
||||
return base;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param value
|
||||
* @return empty string if no domain name was found
|
||||
*/
|
||||
public static String extractDomain(String value) {
|
||||
if (value == null) {
|
||||
return "";
|
||||
|
||||
}
|
||||
String result = "";
|
||||
// String domainPattern = "(\\w+)\\.(AC|AD|AE|AERO|AF|AG|AI|AL|AM|AN|AO|AQ|AR|ARPA|AS|ASIA|AT|AU|AW|AX|AZ|BA|BB|BD|BE|BF|BG|BH|BI|BIZ|BJ|BM|BN|BO|BR|BS|BT|BV|BW|BY|BZ|CA|CAT|CC|CD|CF|CG|CH|CI|CK|CL|CM|CN|CO|COM|COOP|CR|CU|CV|CW|CX|CY|CZ|DE|DJ|DK|DM|DO|DZ|EC|EDU|EE|EG|ER|ES|ET|EU|FI|FJ|FK|FM|FO|FR|GA|GB|GD|GE|GF|GG|GH|GI|GL|GM|GN|GOV|GP|GQ|GR|GS|GT|GU|GW|GY|HK|HM|HN|HR|HT|HU|ID|IE|IL|IM|IN|INFO|INT|IO|IQ|IR|IS|IT|JE|JM|JO|JOBS|JP|KE|KG|KH|KI|KM|KN|KP|KR|KW|KY|KZ|LA|LB|LC|LI|LK|LR|LS|LT|LU|LV|LY|MA|MC|MD|ME|MG|MH|MIL|MK|ML|MM|MN|MO|MOBI|MP|MQ|MR|MS|MT|MU|MUSEUM|MV|MW|MX|MY|MZ|NA|NAME|NC|NE|NET|NF|NG|NI|NL|NO|NP|NR|NU|NZ|OM|ORG|PA|PE|PF|PG|PH|PK|PL|PM|PN|PR|PRO|PS|PT|PW|PY|QA|RE|RO|RS|RU|RW|SA|SB|SC|SD|SE|SG|SH|SI|SJ|SK|SL|SM|SN|SO|SR|ST|SU|SV|SX|SY|SZ|TC|TD|TEL|TF|TG|TH|TJ|TK|TL|TM|TN|TO|TP|TR|TRAVEL|TT|TV|TW|TZ|UA|UG|UK|US|UY|UZ|VA|VC|VE|VG|VI|VN|VU|WF|WS|XXX|YE|YT|ZA|ZM|ZW(co\\.[a-z].))";
|
||||
// Pattern p = Pattern.compile(domainPattern,Pattern.CASE_INSENSITIVE);
|
||||
// Matcher m = p.matcher(value);
|
||||
// while (m.find()) {
|
||||
// result = value.substring(m.start(0),m.end(0));
|
||||
// }
|
||||
|
||||
try {
|
||||
URL url = new URL(value);
|
||||
result = url.getHost();
|
||||
} catch (MalformedURLException ex) {
|
||||
//do not log if not a valid URL, and handle later
|
||||
//Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
|
||||
//was not a valid URL, try a less picky method
|
||||
if (result == null || result.trim().isEmpty()) {
|
||||
return getBaseDomain(value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public static String getFileName(String value) {
|
||||
String filename = "";
|
||||
String filematch = "^([a-zA-Z]\\:)(\\\\[^\\\\/:*?<>\"|]*(?<!\\[ \\]))*(\\.[a-zA-Z]{2,6})$"; //NON-NLS
|
||||
|
@ -1,5 +1,5 @@
|
||||
#Updated by build script
|
||||
#Sat, 13 Oct 2018 21:02:18 -0400
|
||||
#Tue, 13 Nov 2018 17:30:09 -0500
|
||||
LBL_splash_window_title=Starting Autopsy
|
||||
SPLASH_HEIGHT=314
|
||||
SPLASH_WIDTH=538
|
||||
@ -8,4 +8,4 @@ SplashRunningTextBounds=0,289,538,18
|
||||
SplashRunningTextColor=0x0
|
||||
SplashRunningTextFontSize=19
|
||||
|
||||
currentVersion=Autopsy 4.9.0
|
||||
currentVersion=Autopsy 4.9.1
|
||||
|
@ -1,4 +1,4 @@
|
||||
#Updated by build script
|
||||
#Sat, 13 Oct 2018 21:02:18 -0400
|
||||
CTL_MainWindow_Title=Autopsy 4.9.0
|
||||
CTL_MainWindow_Title_No_Project=Autopsy 4.9.0
|
||||
#Tue, 13 Nov 2018 17:30:09 -0500
|
||||
CTL_MainWindow_Title=Autopsy 4.9.1
|
||||
CTL_MainWindow_Title_No_Project=Autopsy 4.9.1
|
||||
|
@ -1,268 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
# Updates various Autopsy version numbers
|
||||
|
||||
use strict;
|
||||
use File::Copy;
|
||||
|
||||
# global variables
|
||||
my $VER;
|
||||
|
||||
|
||||
my $TESTING = 0;
|
||||
print "TESTING MODE (no commits)\n" if ($TESTING);
|
||||
|
||||
|
||||
|
||||
sub main {
|
||||
|
||||
# Get the Autopsy version argument
|
||||
if (scalar (@ARGV) != 1) {
|
||||
print stderr "Missing release version argument (i.e. 4.9.0)\n";
|
||||
exit;
|
||||
}
|
||||
|
||||
$VER = $ARGV[0];
|
||||
die "Invalid version number: $VER (1.2.3 or 1.2.3b1 expected)" unless ($VER =~ /^\d+\.\d+\.\d+(b\d+)?$/);
|
||||
|
||||
|
||||
my $AUT_RELNAME = "autopsy-${VER}";
|
||||
# Verify the tag doesn't already exist
|
||||
exec_pipe(*OUT, "git tag | grep \"${AUT_RELNAME}\$\"");
|
||||
my $foo = read_pipe_line(*OUT);
|
||||
if ($foo ne "") {
|
||||
print "Tag ${AUT_RELNAME} already exists\n";
|
||||
print "Remove with 'git tag -d ${AUT_RELNAME}'\n";
|
||||
die "stopping";
|
||||
}
|
||||
close(OUT);
|
||||
|
||||
# Assume we running out of 'release' folder
|
||||
chdir ".." or die "Error changing directories to root";
|
||||
|
||||
|
||||
# verify_precheckin();
|
||||
|
||||
|
||||
# Update the version info in that tag
|
||||
update_project_properties();
|
||||
update_doxygen_dev();
|
||||
update_doxygen_user();
|
||||
|
||||
print "Files updated. You need to commit and push them\n";
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
######################################################
|
||||
# Utility functions
|
||||
|
||||
|
||||
# Function to execute a command and send output to pipe
|
||||
# returns handle
|
||||
# exec_pipe(HANDLE, CMD);
|
||||
sub exec_pipe {
|
||||
my $handle = shift(@_);
|
||||
my $cmd = shift(@_);
|
||||
|
||||
die "Can't open pipe for exec_pipe"
|
||||
unless defined(my $pid = open($handle, '-|'));
|
||||
|
||||
if ($pid) {
|
||||
return $handle;
|
||||
}
|
||||
else {
|
||||
$| = 1;
|
||||
exec("$cmd") or die "Can't exec program: $!";
|
||||
}
|
||||
}
|
||||
|
||||
# Read a line of text from an open exec_pipe handle
|
||||
sub read_pipe_line {
|
||||
my $handle = shift(@_);
|
||||
my $out;
|
||||
|
||||
for (my $i = 0; $i < 100; $i++) {
|
||||
$out = <$handle>;
|
||||
return $out if (defined $out);
|
||||
}
|
||||
return $out;
|
||||
}
|
||||
|
||||
|
||||
# Prompt user for argument and return response
|
||||
sub prompt_user {
|
||||
my $q = shift(@_);
|
||||
print "$q: ";
|
||||
$| = 1;
|
||||
$_ = <STDIN>;
|
||||
chomp;
|
||||
return $_;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#############################################
|
||||
# File update methods
|
||||
|
||||
|
||||
|
||||
# Verify that all files in the current source directory
|
||||
# are checked in. dies if any are modified.
|
||||
# Verify that all files in the current source directory are checked in
# and pushed to the remote.  Dies (unless $TESTING is set) if any file
# is locally modified or the branch is ahead of its upstream.
sub verify_precheckin {

    #system ("git pull");

    print "Verifying everything is checked in\n";
    exec_pipe(*OUT, "git status -s | grep \"^ M\"");

    # read_pipe_line() returns undef at EOF, which is the normal clean
    # case here (grep matched nothing) -- guard with defined() so the
    # string compare does not warn about an uninitialized value.
    my $foo = read_pipe_line(*OUT);
    if (defined $foo && $foo ne "") {
        print "Files not checked in\n";
        while (defined $foo && $foo ne "") {
            print "$foo";
            $foo = read_pipe_line(*OUT);
        }
        die "stopping" unless ($TESTING);
    }
    close(OUT);

    print "Verifying everything is pushed\n";
    exec_pipe(*OUT, "git status -sb | grep \"^##\" | grep \"ahead \"");
    # Reuse $foo here: the original redeclared it with "my" in the same
    # scope, which masks the earlier declaration and triggers a warning.
    $foo = read_pipe_line(*OUT);
    if (defined $foo && $foo ne "") {
        print "$foo";
        print "Files not pushed to remote\n";
        die "stopping" unless ($TESTING);
    }
    close(OUT);
}
|
||||
|
||||
|
||||
|
||||
# update the version in nbproject/project.properties
|
||||
# Update app.version in nbproject/project.properties to $VER.
# Dies unless exactly one app.version line is found.  The rewritten
# file replaces the original and is staged with git (unless $TESTING).
sub update_project_properties {

    my $orig = "project.properties";
    my $temp = "${orig}-bak";

    print "Updating the version in ${orig}\n";

    chdir "nbproject" or die "cannot change into nbproject directory";

    # Lexical handles with three-argument open are safer than the global
    # bareword CONF_IN/CONF_OUT handles (no cross-sub clashes, and the
    # filename is never parsed for open modes).
    open (my $conf_in, '<', $orig) or die "Cannot open ${orig}";
    open (my $conf_out, '>', $temp) or die "Cannot open ${temp}";

    my $found = 0;
    while (<$conf_in>) {
        if (/^app\.version=/) {
            print $conf_out "app.version=$VER\n";
            $found++;
        }
        else {
            print $conf_out $_;
        }
    }
    close ($conf_in);
    close ($conf_out);

    # Exactly one app.version line is expected; anything else means the
    # properties file layout changed and needs a human look.
    if ($found != 1) {
        die "$found (instead of 1) occurrences of app.version found in ${orig}";
    }

    unlink ($orig) or die "Error deleting ${orig}";
    rename ($temp, $orig) or die "Error renaming tmp $orig file";
    system("git add ${orig}") unless ($TESTING);
    chdir ".." or die "Error changing directories back to root";
}
|
||||
|
||||
|
||||
|
||||
# update the dev docs
|
||||
# Update the developer-docs Doxyfile (docs/doxygen): sets PROJECT_NUMBER
# to $VER and points HTML_OUTPUT at the versioned api-docs directory.
# Dies unless exactly both lines are found; stages the file with git
# unless $TESTING.
sub update_doxygen_dev {

    my $orig = "Doxyfile";
    my $temp = "${orig}-bak";

    print "Updating the version in ${orig} (Dev)\n";

    chdir "docs/doxygen" or die "cannot change into docs/doxygen directory";

    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";

    # Count replacements so we can sanity-check the file layout below.
    my $hits = 0;
    while (my $line = <CONF_IN>) {
        if ($line =~ /^PROJECT_NUMBER/) {
            $line = "PROJECT_NUMBER = ${VER}\n";
            $hits++;
        }
        elsif ($line =~ /^HTML_OUTPUT/) {
            $line = "HTML_OUTPUT = api-docs/${VER}/\n";
            $hits++;
        }
        print CONF_OUT $line;
    }
    close (CONF_IN);
    close (CONF_OUT);

    die "$hits (instead of 2) occurrences of version found in (DEV) ${orig}"
        if ($hits != 2);

    unlink ($orig) or die "Error deleting ${orig}";
    rename ($temp, $orig) or die "Error renaming tmp $orig file";
    system("git add ${orig}") unless ($TESTING);
    chdir "../.." or die "Error changing directories back to root";
}
|
||||
|
||||
|
||||
# update the user docs
|
||||
# Update the user-docs Doxyfile (docs/doxygen-user): sets PROJECT_NUMBER
# and HTML_OUTPUT to $VER.  Dies unless both lines are found exactly
# once; stages the rewritten file with git unless $TESTING.
sub update_doxygen_user {

    my $orig = "Doxyfile";
    my $temp = "${orig}-bak";

    print "Updating the version in ${orig} (User)\n";

    chdir "docs/doxygen-user" or die "cannot change into docs/doxygen-user directory";

    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";

    my $found = 0;
    foreach my $line (<CONF_IN>) {
        if ($line =~ /^PROJECT_NUMBER/) {
            print CONF_OUT "PROJECT_NUMBER = ${VER}\n";
            $found++;
        }
        elsif ($line =~ /^HTML_OUTPUT/) {
            print CONF_OUT "HTML_OUTPUT = ${VER}\n";
            $found++;
        }
        else {
            print CONF_OUT $line;
        }
    }
    close (CONF_IN);
    close (CONF_OUT);

    if ($found != 2) {
        die "$found (instead of 2) occurrences of version found in (USER) ${orig}";
    }

    unlink ($orig) or die "Error deleting ${orig}";
    rename ($temp, $orig) or die "Error renaming tmp $orig file";
    system("git add ${orig}") unless ($TESTING);
    chdir "../.." or die "Error changing directories back to root";
}
|
||||
|
||||
|
||||
main();  # script entry point
|
@ -1,199 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
# Updates various TSK version numbers
|
||||
# use this when the version of TSK that Autopsy depends on changes
|
||||
|
||||
use strict;
|
||||
use File::Copy;
|
||||
|
||||
# global variables
|
||||
my $VER;
|
||||
|
||||
my $TESTING = 0;
|
||||
print "TESTING MODE (no commits)\n" if ($TESTING);
|
||||
|
||||
|
||||
# Entry point: parse the TSK version from @ARGV, validate it, then
# update every file in the tree that embeds the TSK version number.
sub main {

    # Get the TSK version argument
    if (scalar (@ARGV) != 1) {
        # STDERR (uppercase) is the real standard-error stream; the
        # lowercase bareword "stderr" is an unopened handle and the
        # message was silently lost.  Also exit non-zero on usage error.
        print STDERR "Missing release version argument (i.e. 4.9.0)\n";
        exit 1;
    }

    $VER = $ARGV[0];
    die "Invalid version number: $VER (1.2.3 or 1.2.3b1 expected)" unless ($VER =~ /^\d+\.\d+\.\d+(b\d+)?$/);

    # Assume we running out of 'release' folder
    chdir ".." or die "Error changing directories to root";

    # Update the version info in that tag
    update_tsk_version();
    update_core_project_properties();
    update_core_project_xml();

    print "Files updated. You need to commit and push them\n";
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
######################################################
|
||||
# Utility functions
|
||||
|
||||
|
||||
# Function to execute a command and send output to pipe
|
||||
# returns handle
|
||||
# exec_pipe(HANDLE, CMD);
|
||||
# Run $cmd in a forked child and wire its stdout to $handle; the parent
# gets the handle back for reading.
# exec_pipe(HANDLE, CMD);
sub exec_pipe {
    my $handle = shift(@_);
    my $cmd    = shift(@_);

    my $pid = open($handle, '-|');
    defined $pid or die "Can't open pipe for exec_pipe";

    if ($pid) {
        # Parent side of the pipe.
        return $handle;
    }

    # Child side: unbuffer and become the command.
    $| = 1;
    exec("$cmd") or die "Can't exec program: $!";
}
|
||||
|
||||
# Read a line of text from an open exec_pipe handle
|
||||
# Read a single line from an open exec_pipe handle.  Retries the read
# up to 100 times; returns undef if the handle never yields a line.
sub read_pipe_line {
    my $handle = shift(@_);
    my $result;

    foreach my $try (1 .. 100) {
        $result = <$handle>;
        return $result if defined $result;
    }
    return $result;
}
|
||||
|
||||
|
||||
|
||||
#############################################
|
||||
# File update methods
|
||||
|
||||
|
||||
|
||||
# update the tskversion.xml
|
||||
# Update the TSK_VERSION property in TSKVersion.xml to $VER.
# Dies unless exactly one TSK_VERSION line is found; the rewritten file
# replaces the original and is staged with git unless $TESTING.
sub update_tsk_version {

    my $orig = "TSKVersion.xml";
    my $temp = "${orig}-bak";

    print "Updating the version in ${orig}\n";

    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";

    my $found = 0;
    while (<CONF_IN>) {
        if (/name="TSK_VERSION" value=/) {
            print CONF_OUT " <property name=\"TSK_VERSION\" value=\"${VER}\"/>\n";
            $found++;
        }
        else {
            print CONF_OUT $_;
        }
    }
    close (CONF_IN);
    close (CONF_OUT);

    if ($found != 1) {
        # Error message fixed: this sub replaces TSK_VERSION, not
        # app.version (the old text was copy-pasted from another sub).
        die "$found (instead of 1) occurrences of TSK_VERSION found in ${orig}";
    }

    unlink ($orig) or die "Error deleting ${orig}";
    rename ($temp, $orig) or die "Error renaming tmp $orig file";
    system("git add ${orig}") unless ($TESTING);

}
|
||||
|
||||
|
||||
|
||||
# Rewrite the sleuthkit-postgresql jar reference in
# Core/nbproject/project.properties to point at version $VER.
# Dies unless the reference line is found exactly once; stages the file
# with git unless $TESTING.
sub update_core_project_properties {

    my $orig = "project.properties";
    my $temp = "${orig}-bak";

    print "Updating the version in ${orig}\n";

    chdir "Core/nbproject" or die "cannot change into Core/nbproject directory";

    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";

    my $hits = 0;
    while (my $line = <CONF_IN>) {
        if ($line =~ /^file\.reference\.sleuthkit\-postgresql-/) {
            $line = "file.reference.sleuthkit-postgresql-${VER}.jar=release/modules/ext/sleuthkit-postgresql-${VER}.jar\n";
            $hits++;
        }
        print CONF_OUT $line;
    }
    close (CONF_IN);
    close (CONF_OUT);

    die "$hits (instead of 1) occurrences of version found in ${orig}"
        if ($hits != 1);

    unlink ($orig) or die "Error deleting ${orig}";
    rename ($temp, $orig) or die "Error renaming tmp $orig file";
    system("git add ${orig}") unless ($TESTING);
    chdir "../.." or die "Error changing directories back to root";
}
|
||||
|
||||
# Rewrite the sleuthkit-postgresql jar paths in Core/nbproject/project.xml
# (<runtime-relative-path> and <binary-origin>) to version $VER.
# Dies unless both lines are found exactly once each; stages the file
# with git unless $TESTING.
sub update_core_project_xml {

    my $orig = "project.xml";
    my $temp = "${orig}-bak";

    print "Updating the version in ${orig}\n";

    chdir "Core/nbproject" or die "cannot change into Core/nbproject directory";

    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";

    my $hits = 0;
    while (my $line = <CONF_IN>) {
        if ($line =~ /<runtime-relative-path>ext\/sleuthkit-postgresql/) {
            $line = " <runtime-relative-path>ext/sleuthkit-postgresql-${VER}.jar</runtime-relative-path>\n";
            $hits++;
        }
        elsif ($line =~ /<binary-origin>release\/modules\/ext\/sleuthkit-postgresql/) {
            $line = " <binary-origin>release/modules/ext/sleuthkit-postgresql-${VER}.jar</binary-origin>\n";
            $hits++;
        }
        print CONF_OUT $line;
    }
    close (CONF_IN);
    close (CONF_OUT);

    die "$hits (instead of 2) occurrences of version found in ${orig}"
        if ($hits != 2);

    unlink ($orig) or die "Error deleting ${orig}";
    rename ($temp, $orig) or die "Error renaming tmp $orig file";
    system("git add ${orig}") unless ($TESTING);
    chdir "../.." or die "Error changing directories back to root";
}
|
||||
|
||||
|
||||
|
||||
|
||||
main();  # script entry point
|
Loading…
x
Reference in New Issue
Block a user