From 257cb46125c833389c91deb169287ff22409af7b Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Tue, 21 May 2019 16:08:02 -0400 Subject: [PATCH 01/16] Initial code for deleting a data source from SOLR This is the initial code for deleting a data source id from SOLR. --- .../actions/DeleteDataSourceAction.java | 18 +++++++++-- .../KeywordSearchService.java | 9 ++++++ .../keywordsearch/Bundle.properties-MERGED | 1 + .../autopsy/keywordsearch/Server.java | 32 +++++++++++++++++++ .../keywordsearch/SolrSearchService.java | 22 +++++++++++++ 5 files changed, 80 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java index c81e81e3fb..4557cddaef 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java @@ -22,10 +22,15 @@ import java.awt.event.ActionEvent; import java.text.MessageFormat; import java.util.logging.Level; import javax.swing.AbstractAction; +import org.openide.util.Lookup; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.CaseMetadata; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService; +import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException; +import org.sleuthkit.autopsy.progress.ProgressIndicator; import org.sleuthkit.datamodel.TskCoreException; /** @@ -48,11 +53,20 @@ public final class DeleteDataSourceAction extends AbstractAction { try { //VersionNumber checkVersionNumber = Case.getCurrentCaseThrows().getSleuthkitCase().getDBSchemaVersion(); Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(selectDataSource); - } catch (NoCurrentCaseException | TskCoreException e) { + deleteDataSource(selectDataSource); 
+ } catch (NoCurrentCaseException | TskCoreException | KeywordSearchServiceException e) { String msg = MessageFormat.format(Bundle.ErrorDeletingDataSource_name_text(), selectDataSource); logger.log(Level.WARNING, msg, e); //throw new TskCoreException(msg, e); } } - + private static void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { + try { + KeywordSearchService kwsService = Lookup.getDefault().lookup(KeywordSearchService.class); + kwsService.deleteDataSource(dataSourceId); + } catch (KeywordSearchServiceException e) { + logger.log(Level.WARNING, "KWS Error", e); + } + + } } diff --git a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java index 1a9e947b76..6610db8178 100644 --- a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java +++ b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java @@ -71,4 +71,13 @@ public interface KeywordSearchService extends Closeable { */ public void deleteTextIndex(CaseMetadata metadata) throws KeywordSearchServiceException; + /** + * Deletes the keyword search text for a specific data source. + * + * @param dataSourceId The data source id to be deleted from Solr. + * + * @throws KeywordSearchServiceException if unable to delete. + */ + public void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException; + } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index c3a2ee4faf..c02e0ddba1 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -347,6 +347,7 @@ SolrSearch.openCore.msg=Opening text index SolrSearch.openGiantCore.msg=Opening text index. 
Text index for this case is very large and may take long time to load. SolrSearch.openLargeCore.msg=Opening text index. This may take several minutes. SolrSearch.readingIndexes.msg=Reading text index metadata file +SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore=DeleteDataSource did not contain a current Solr core so could not delete the Data Source # {0} - index folder path SolrSearchService.exceptionMessage.failedToDeleteIndexFiles=Failed to delete text index files at {0} SolrSearchService.exceptionMessage.noCurrentSolrCore=IndexMetadata did not contain a current Solr core so could not delete the case diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index 0234ac7f73..bcc975c3ad 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -54,6 +54,7 @@ import org.apache.solr.client.solrj.request.CoreAdminRequest; import org.apache.solr.client.solrj.response.CoreAdminResponse; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.TermsResponse; +import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; @@ -1215,6 +1216,25 @@ public class Server { } } + /** + * Delete a data source fo SOLR. + * + * @param dataSourceId to delete + * + * @throws NoOpenCoreException + */ + public void deleteDataSource(Long dataSourceId) throws NoOpenCoreException { + currentCoreLock.writeLock().lock(); + try { + if (null == currentCore) { + throw new NoOpenCoreException(); + } + currentCore.deleteDataSource(dataSourceId); + } finally { + currentCoreLock.writeLock().unlock(); + } + } + /** * Get the text contents of the given file as stored in SOLR. 
* @@ -1456,6 +1476,18 @@ public class Server { } } + private void deleteDataSource(Long dsObjId) { + String dataSourceId = Long.toString(dsObjId); + String deleteQuery = "image_id:" + dataSourceId; + try { + // Get the first result. + UpdateResponse updateResponse = solrCore.deleteByQuery(deleteQuery); + int x = 0; + } catch (SolrServerException | IOException ex) { + logger.log(Level.SEVERE, "Error deleting content from Solr. Solr image id " + dataSourceId, ex); //NON-NLS + } + } + void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException { try { solrCore.add(doc); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java index be47a307fc..04a3ee18ba 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java @@ -191,6 +191,28 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService { } } + /** + * Deletes a data source from Solr for a case. + * + * @param dataSourceId the id of the data source to delete. 
+ * + * @throws org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException + */ + @NbBundle.Messages({ + "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore=DeleteDataSource did not contain a current Solr core so could not delete the Data Source", + }) + @Override + public void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { + try { + KeywordSearch.getServer().deleteDataSource(dataSourceId); + } catch (NoOpenCoreException ex) { + logger.log(Level.WARNING, NbBundle.getMessage(SolrSearchService.class, + "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore")); + throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, + "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore")); + } + } + /** * Deletes Solr core for a case. * From a53ae96598851ae8e0ac51149a0498ff478d9e65 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Wed, 22 May 2019 13:45:27 -0400 Subject: [PATCH 02/16] Fix possible NPE Fix possible NPE if content was null. Skip statement and continue. --- .../src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java index 66d0803812..3d847c6521 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java @@ -220,7 +220,7 @@ class QueryResults { logger.log(Level.SEVERE, "Failed to get text source object for ", tskCoreException); //NON-NLS } - if (saveResults) { + if ((saveResults) && (content != null)) { /* * Post an artifact for the hit to the blackboard. 
*/ From e7e82db60a903f0bb09b8dbd456d26716892ec53 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Wed, 22 May 2019 13:46:18 -0400 Subject: [PATCH 03/16] Add commit to solr deletion Add commit to solr image id deletion --- .../src/org/sleuthkit/autopsy/keywordsearch/Server.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index bcc975c3ad..74a7c294df 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -1230,6 +1230,9 @@ public class Server { throw new NoOpenCoreException(); } currentCore.deleteDataSource(dataSourceId); + currentCore.commit(); + } catch (SolrServerException ex) { + logger.log(Level.SEVERE, "Solr delete data dource failed for data source: " + Long.toString(dataSourceId), ex); //NON-NLS } finally { currentCoreLock.writeLock().unlock(); } @@ -1482,7 +1485,6 @@ public class Server { try { // Get the first result. UpdateResponse updateResponse = solrCore.deleteByQuery(deleteQuery); - int x = 0; } catch (SolrServerException | IOException ex) { logger.log(Level.SEVERE, "Error deleting content from Solr. 
Solr image id " + dataSourceId, ex); //NON-NLS } From 67d39899d05aeec863d813f5a02c66901deef886 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Sun, 26 May 2019 15:54:02 -0400 Subject: [PATCH 04/16] Update DeleteDataSourceAction.java Codacy Fix --- .../org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java index 4557cddaef..d7afe92c3d 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java @@ -25,12 +25,10 @@ import javax.swing.AbstractAction; import org.openide.util.Lookup; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.CaseMetadata; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService; import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException; -import org.sleuthkit.autopsy.progress.ProgressIndicator; import org.sleuthkit.datamodel.TskCoreException; /** From 249ed2445697d053a1ff2efbb21f79c1f0836ab0 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Sun, 26 May 2019 16:00:48 -0400 Subject: [PATCH 05/16] Update DeleteDataSourceAction.java fix codacy --- .../org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java index d7afe92c3d..60b9063bf9 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java @@ -31,10 +31,6 @@ import 
org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService; import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException; import org.sleuthkit.datamodel.TskCoreException; -/** - * - * @author markm - */ public final class DeleteDataSourceAction extends AbstractAction { private static final Logger logger = Logger.getLogger(DeleteDataSourceAction.class.getName()); private final Long selectDataSource; From 96ed64161d95245e20feac180bb2df314a8bdd21 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Mon, 15 Jul 2019 20:18:06 -0400 Subject: [PATCH 06/16] Update ImageNode.java Update comments from 2409 that were carried over. --- Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java index d5d5d1bd04..dbfa44e207 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java @@ -205,7 +205,7 @@ public class ImageNode extends AbstractContentNode { } private Boolean checkSchemaVersion() { - String sqlStatement = "SELECT a.value creationMajorVersion, b.value creationMinorVersion FROM tsk_db_info_extended a, tsk_db_info_extended b " + + String sqlStatement = "SELECT a.value AS creationMajorVersion, b.value AS creationMinorVersion FROM tsk_db_info_extended a, tsk_db_info_extended b " + " WHERE a.name = 'CREATION_SCHEMA_MAJOR_VERSION' and b.name = 'CREATION_SCHEMA_MINOR_VERSION';"; try (CaseDbQuery query = Case.getCurrentCaseThrows().getSleuthkitCase().executeQuery(sqlStatement);) { ResultSet schemaVersion = query.getResultSet(); @@ -213,11 +213,9 @@ public class ImageNode extends AbstractContentNode { int creationMajorVersion = schemaVersion.getInt("creationMajorVersion"); int creationMinorVersion = schemaVersion.getInt("creationMinorVersion"); if ((creationMajorVersion == 8 && creationMinorVersion >= 3) || 
creationMajorVersion > 8) { - schemaVersion.close(); return true; } } - schemaVersion.close(); } catch (SQLException | TskCoreException | NoCurrentCaseException ex) { logger.log(Level.SEVERE, "Failed to get the Create Major and Minor Schema Versions", ex); } From 8ddc174f2ff43a8d7f89671c4d8a066ff24cd2b6 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Mon, 15 Jul 2019 20:24:51 -0400 Subject: [PATCH 07/16] Update DeleteDataSourceAction.java adjust for comments in 2409 --- .../autopsy/actions/DeleteDataSourceAction.java | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java index 60b9063bf9..abed76e98a 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java @@ -19,7 +19,6 @@ package org.sleuthkit.autopsy.actions; import java.awt.event.ActionEvent; -import java.text.MessageFormat; import java.util.logging.Level; import javax.swing.AbstractAction; import org.openide.util.Lookup; @@ -33,25 +32,21 @@ import org.sleuthkit.datamodel.TskCoreException; public final class DeleteDataSourceAction extends AbstractAction { private static final Logger logger = Logger.getLogger(DeleteDataSourceAction.class.getName()); - private final Long selectDataSource; + private final Long selectedDataSource; @NbBundle.Messages({"DeleteDataSourceAction.name.text=Delete Data Source"}) public DeleteDataSourceAction(Long selectedDataSource) { super(Bundle.DeleteDataSourceAction_name_text()); - selectDataSource = selectedDataSource; + this.selectedDataSource = selectedDataSource; } - @NbBundle.Messages({"ErrorDeletingDataSource.name.text=Error Deleting Data Source"}) @Override public void actionPerformed(ActionEvent event) { try { - //VersionNumber checkVersionNumber = Case.getCurrentCaseThrows().getSleuthkitCase().getDBSchemaVersion(); - 
Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(selectDataSource); - deleteDataSource(selectDataSource); + Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(selectedDataSource); + deleteDataSource(selectedDataSource); } catch (NoCurrentCaseException | TskCoreException | KeywordSearchServiceException e) { - String msg = MessageFormat.format(Bundle.ErrorDeletingDataSource_name_text(), selectDataSource); - logger.log(Level.WARNING, msg, e); - //throw new TskCoreException(msg, e); + logger.log(Level.WARNING, "Error Deleting Data source " + selectedDataSource, e); } } private static void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { From 83991049637e89341d4df8159c8e3e9776e0bf2b Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Mon, 15 Jul 2019 20:34:23 -0400 Subject: [PATCH 08/16] Update Server.java Fix spelling error in comment and remove long conversion in msg. --- .../src/org/sleuthkit/autopsy/keywordsearch/Server.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index 74a7c294df..000c11a282 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -1217,7 +1217,7 @@ public class Server { } /** - * Delete a data source fo SOLR. + * Delete a data source from SOLR. 
* * @param dataSourceId to delete * @@ -1232,7 +1232,7 @@ public class Server { currentCore.deleteDataSource(dataSourceId); currentCore.commit(); } catch (SolrServerException ex) { - logger.log(Level.SEVERE, "Solr delete data dource failed for data source: " + Long.toString(dataSourceId), ex); //NON-NLS + logger.log(Level.SEVERE, "Solr delete data dource failed for data source: " + dataSourceId, ex); //NON-NLS } finally { currentCoreLock.writeLock().unlock(); } From 1b16ffa54bb3c952abe9aac3496ca59358d2e472 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Tue, 13 Aug 2019 11:17:57 -0400 Subject: [PATCH 09/16] Update from merge of 2409 Update code from merge of 2409 --- .../actions/DeleteDataSourceAction.java | 126 +-- .../autopsy/datamodel/ImageNode.java | 696 ++++++++--------- .../autopsy/keywordsearch/QueryResults.java | 728 +++++++++--------- 3 files changed, 775 insertions(+), 775 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java index f03cd446f1..697e37480b 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java @@ -1,63 +1,63 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2019 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.actions; - -import java.awt.event.ActionEvent; -import java.util.logging.Level; -import javax.swing.AbstractAction; -import org.openide.util.Lookup; -import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService; -import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Instances of this Action allow users to delete the specified data source. - */ -public final class DeleteDataSourceAction extends AbstractAction { - private static final Logger logger = Logger.getLogger(DeleteDataSourceAction.class.getName()); - private final Long selectedDataSource; - - @NbBundle.Messages({"DeleteDataSourceAction.name.text=Delete Data Source"}) - public DeleteDataSourceAction(Long selectedDataSource) { - super(Bundle.DeleteDataSourceAction_name_text()); - this.selectedDataSource = selectedDataSource; - } - - @Override - public void actionPerformed(ActionEvent event) { - try { - Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(selectedDataSource); - } catch (NoCurrentCaseException | TskCoreException e) { - logger.log(Level.WARNING, "Error Deleting Data source " + selectedDataSource, e); - } - } - private static void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { - try { - KeywordSearchService kwsService = Lookup.getDefault().lookup(KeywordSearchService.class); - kwsService.deleteDataSource(dataSourceId); - } catch (KeywordSearchServiceException e) { - logger.log(Level.WARNING, "KWS Error", e); - } - - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2019 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.actions; + +import java.awt.event.ActionEvent; +import java.util.logging.Level; +import javax.swing.AbstractAction; +import org.openide.util.Lookup; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService; +import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Instances of this Action allow users to delete the specified data source. 
+ */ +public final class DeleteDataSourceAction extends AbstractAction { + private static final Logger logger = Logger.getLogger(DeleteDataSourceAction.class.getName()); + private final Long selectedDataSource; + + @NbBundle.Messages({"DeleteDataSourceAction.name.text=Delete Data Source"}) + public DeleteDataSourceAction(Long selectedDataSource) { + super(Bundle.DeleteDataSourceAction_name_text()); + this.selectedDataSource = selectedDataSource; + } + + @Override + public void actionPerformed(ActionEvent event) { + try { + Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(selectedDataSource); + } catch (NoCurrentCaseException | TskCoreException e) { + logger.log(Level.WARNING, "Error Deleting Data source " + selectedDataSource, e); + } + } + private static void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { + try { + KeywordSearchService kwsService = Lookup.getDefault().lookup(KeywordSearchService.class); + kwsService.deleteDataSource(dataSourceId); + } catch (KeywordSearchServiceException e) { + logger.log(Level.WARNING, "KWS Error", e); + } + + } +} diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java index 3eefc3aeff..18f5eef67b 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/ImageNode.java @@ -1,348 +1,348 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011-2019 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.datamodel; - -import java.beans.PropertyChangeEvent; -import java.beans.PropertyChangeListener; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; -import java.util.logging.Level; -import javax.swing.Action; -import org.apache.commons.lang3.tuple.Pair; -import org.openide.nodes.Sheet; -import org.openide.util.NbBundle; -import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.actions.DeleteDataSourceAction; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.casemodule.datasourcesummary.ViewSummaryInformationAction; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.corecomponents.DataResultViewerTable; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.directorytree.ExplorerNodeActionVisitor; -import org.sleuthkit.autopsy.directorytree.FileSearchAction; -import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.autopsy.ingest.runIngestModuleWizard.RunIngestModulesAction; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.Image; -import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.VirtualDirectory; -import org.sleuthkit.autopsy.datamodel.BaseChildFactory.NoSuchEventBusException; -import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber; -import org.sleuthkit.datamodel.Tag; - -/** - * This class is used to represent the "Node" for the image. 
The children of - * this node are volumes. - */ -public class ImageNode extends AbstractContentNode { - - private static final Logger logger = Logger.getLogger(ImageNode.class.getName()); - - /** - * Helper so that the display name and the name used in building the path - * are determined the same way. - * - * @param i Image to get the name of - * - * @return short name for the Image - */ - static String nameForImage(Image i) { - return i.getName(); - } - - /** - * @param img - */ - public ImageNode(Image img) { - super(img); - - // set name, display name, and icon - String imgName = nameForImage(img); - this.setDisplayName(imgName); - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hard-drive-icon.jpg"); //NON-NLS - - // Listen for ingest events so that we can detect new added files (e.g. carved) - IngestManager.getInstance().addIngestModuleEventListener(pcl); - // Listen for case events so that we can detect when case is closed - Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), pcl); - } - - private void removeListeners() { - IngestManager.getInstance().removeIngestModuleEventListener(pcl); - Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), pcl); - } - - /** - * Right click action for this node - * - * @param context - * - * @return - */ - @Override - @Messages({"ImageNode.action.runIngestMods.text=Run Ingest Modules", - "ImageNode.getActions.openFileSearchByAttr.text=Open File Search by Attributes"}) - public Action[] getActions(boolean context) { - - List actionsList = new ArrayList<>(); - for (Action a : super.getActions(true)) { - actionsList.add(a); - } - actionsList.addAll(ExplorerNodeActionVisitor.getActions(content)); - actionsList.add(new FileSearchAction( - Bundle.ImageNode_getActions_openFileSearchByAttr_text())); - actionsList.add(new ViewSummaryInformationAction(content.getId())); - actionsList.add(new RunIngestModulesAction(Collections.singletonList(content))); - actionsList.add(new 
NewWindowViewAction( - NbBundle.getMessage(this.getClass(), "ImageNode.getActions.viewInNewWin.text"), this)); - if (checkSchemaVersion()) { - actionsList.add(new DeleteDataSourceAction(content.getId())); - } - return actionsList.toArray(new Action[0]); - } - - @Override - @Messages({"ImageNode.createSheet.size.name=Size (Bytes)", - "ImageNode.createSheet.size.displayName=Size (Bytes)", - "ImageNode.createSheet.size.desc=Size of the data source in bytes.", - "ImageNode.createSheet.type.name=Type", - "ImageNode.createSheet.type.displayName=Type", - "ImageNode.createSheet.type.desc=Type of the image.", - "ImageNode.createSheet.type.text=Image", - "ImageNode.createSheet.sectorSize.name=Sector Size (Bytes)", - "ImageNode.createSheet.sectorSize.displayName=Sector Size (Bytes)", - "ImageNode.createSheet.sectorSize.desc=Sector size of the image in bytes.", - "ImageNode.createSheet.timezone.name=Timezone", - "ImageNode.createSheet.timezone.displayName=Timezone", - "ImageNode.createSheet.timezone.desc=Timezone of the image", - "ImageNode.createSheet.deviceId.name=Device ID", - "ImageNode.createSheet.deviceId.displayName=Device ID", - "ImageNode.createSheet.deviceId.desc=Device ID of the image"}) - protected Sheet createSheet() { - Sheet sheet = super.createSheet(); - Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); - if (sheetSet == null) { - sheetSet = Sheet.createPropertiesSet(); - sheet.put(sheetSet); - } - - sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ImageNode.createSheet.name.name"), - NbBundle.getMessage(this.getClass(), "ImageNode.createSheet.name.displayName"), - NbBundle.getMessage(this.getClass(), "ImageNode.createSheet.name.desc"), - getDisplayName())); - - sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_type_name(), - Bundle.ImageNode_createSheet_type_displayName(), - Bundle.ImageNode_createSheet_type_desc(), - Bundle.ImageNode_createSheet_type_text())); - - sheetSet.put(new 
NodeProperty<>(Bundle.ImageNode_createSheet_size_name(), - Bundle.ImageNode_createSheet_size_displayName(), - Bundle.ImageNode_createSheet_size_desc(), - this.content.getSize())); - sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_sectorSize_name(), - Bundle.ImageNode_createSheet_sectorSize_displayName(), - Bundle.ImageNode_createSheet_sectorSize_desc(), - this.content.getSsize())); - - sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_timezone_name(), - Bundle.ImageNode_createSheet_timezone_displayName(), - Bundle.ImageNode_createSheet_timezone_desc(), - this.content.getTimeZone())); - - try (CaseDbQuery query = Case.getCurrentCaseThrows().getSleuthkitCase().executeQuery("SELECT device_id FROM data_source_info WHERE obj_id = " + this.content.getId());) { - ResultSet deviceIdSet = query.getResultSet(); - if (deviceIdSet.next()) { - sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_deviceId_name(), - Bundle.ImageNode_createSheet_deviceId_displayName(), - Bundle.ImageNode_createSheet_deviceId_desc(), - deviceIdSet.getString("device_id"))); - } - } catch (SQLException | TskCoreException | NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Failed to get device id for the following image: " + this.content.getId(), ex); - } - - return sheet; - } - - @Override - public T accept(ContentNodeVisitor visitor) { - return visitor.visit(this); - } - - @Override - public boolean isLeafTypeNode() { - return false; - } - - @Override - public T accept(DisplayableItemNodeVisitor visitor) { - return visitor.visit(this); - } - - @Override - public String getItemType() { - return getClass().getName(); - } - - private Boolean checkSchemaVersion() { - try { - CaseDbSchemaVersionNumber creationVersion = Case.getCurrentCaseThrows().getSleuthkitCase().getDBSchemaCreationVersion(); - - if ((creationVersion.getMajor() == 8 && creationVersion.getMinor() >= 3) || creationVersion.getMajor() > 8) { - return true; - } - } catch (NoCurrentCaseException ex) { - 
logger.log(Level.WARNING, "Failed to get creation schema version: ", ex); - } - - return false; - - } - - /* - * This property change listener refreshes the tree when a new file is - * carved out of this image (i.e, the image is being treated as raw bytes - * and was ingested by the RawDSProcessor). - */ - private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { - String eventType = evt.getPropertyName(); - - // See if the new file is a child of ours - if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) { - if ((evt.getOldValue() instanceof ModuleContentEvent) == false) { - return; - } - ModuleContentEvent moduleContentEvent = (ModuleContentEvent) evt.getOldValue(); - if ((moduleContentEvent.getSource() instanceof Content) == false) { - return; - } - Content newContent = (Content) moduleContentEvent.getSource(); - - try { - Content parent = newContent.getParent(); - if (parent != null) { - // Is this a new carved file? - if (parent.getName().equals(VirtualDirectory.NAME_CARVED)) { - // Is this new carved file for this data source? - if (newContent.getDataSource().getId() == getContent().getDataSource().getId()) { - // Find the image (if any) associated with the new content and - // trigger a refresh if it matches the image wrapped by this node. - while ((parent = parent.getParent()) != null) { - if (parent.getId() == getContent().getId()) { - BaseChildFactory.post(getName(), new BaseChildFactory.RefreshKeysEvent()); - break; - } - } - } - } - } - } catch (TskCoreException ex) { - // Do nothing. - } catch (NoSuchEventBusException ex) { - logger.log(Level.WARNING, "Failed to post key refresh event.", ex); // NON-NLS - } - } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) { - if (evt.getNewValue() == null) { - // case was closed. 
Remove listeners so that we don't get called with a stale case handle - removeListeners(); - } - } - }; - - /** - * Reads and returns a list of all tags associated with this content node. - * - * Null implementation of an abstract method. - * - * @return list of tags associated with the node. - */ - @Override - protected List getAllTagsFromDatabase() { - return new ArrayList<>(); - } - - /** - * Returns correlation attribute instance for the underlying content of the - * node. - * - * Null implementation of an abstract method. - * - * @return correlation attribute instance for the underlying content of the - * node. - */ - @Override - protected CorrelationAttributeInstance getCorrelationAttributeInstance() { - return null; - } - - /** - * Returns Score property for the node. - * - * Null implementation of an abstract method. - * - * @param tags list of tags. - * - * @return Score property for the underlying content of the node. - */ - @Override - protected Pair getScorePropertyAndDescription(List tags) { - return Pair.of(DataResultViewerTable.Score.NO_SCORE, NO_DESCR); - } - - /** - * Returns comment property for the node. - * - * Null implementation of an abstract method. - * - * @param tags list of tags - * @param attribute correlation attribute instance - * - * @return Comment property for the underlying content of the node. - */ - @Override - protected DataResultViewerTable.HasCommentStatus getCommentProperty(List tags, CorrelationAttributeInstance attribute) { - return DataResultViewerTable.HasCommentStatus.NO_COMMENT; - } - - /** - * Returns occurrences/count property for the node. - * - * Null implementation of an abstract method. - * - * @param attributeType the type of the attribute to count - * @param attributeValue the value of the attribute to coun - * @param defaultDescription a description to use when none is determined by - * the getCountPropertyAndDescription method - * - * @return count property for the underlying content of the node. 
- */ - @Override - protected Pair getCountPropertyAndDescription(CorrelationAttributeInstance.Type attributeType, String attributeValue, String defaultDescription) { - return Pair.of(-1L, NO_DESCR); - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2011-2019 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.datamodel; + +import java.beans.PropertyChangeEvent; +import java.beans.PropertyChangeListener; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.logging.Level; +import javax.swing.Action; +import org.apache.commons.lang3.tuple.Pair; +import org.openide.nodes.Sheet; +import org.openide.util.NbBundle; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.actions.DeleteDataSourceAction; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.casemodule.datasourcesummary.ViewSummaryInformationAction; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; +import org.sleuthkit.autopsy.corecomponents.DataResultViewerTable; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.directorytree.ExplorerNodeActionVisitor; +import 
org.sleuthkit.autopsy.directorytree.FileSearchAction; +import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.ingest.runIngestModuleWizard.RunIngestModulesAction; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.VirtualDirectory; +import org.sleuthkit.autopsy.datamodel.BaseChildFactory.NoSuchEventBusException; +import org.sleuthkit.datamodel.CaseDbSchemaVersionNumber; +import org.sleuthkit.datamodel.Tag; + +/** + * This class is used to represent the "Node" for the image. The children of + * this node are volumes. + */ +public class ImageNode extends AbstractContentNode { + + private static final Logger logger = Logger.getLogger(ImageNode.class.getName()); + + /** + * Helper so that the display name and the name used in building the path + * are determined the same way. + * + * @param i Image to get the name of + * + * @return short name for the Image + */ + static String nameForImage(Image i) { + return i.getName(); + } + + /** + * @param img + */ + public ImageNode(Image img) { + super(img); + + // set name, display name, and icon + String imgName = nameForImage(img); + this.setDisplayName(imgName); + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hard-drive-icon.jpg"); //NON-NLS + + // Listen for ingest events so that we can detect new added files (e.g. 
carved) + IngestManager.getInstance().addIngestModuleEventListener(pcl); + // Listen for case events so that we can detect when case is closed + Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), pcl); + } + + private void removeListeners() { + IngestManager.getInstance().removeIngestModuleEventListener(pcl); + Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), pcl); + } + + /** + * Right click action for this node + * + * @param context + * + * @return + */ + @Override + @Messages({"ImageNode.action.runIngestMods.text=Run Ingest Modules", + "ImageNode.getActions.openFileSearchByAttr.text=Open File Search by Attributes"}) + public Action[] getActions(boolean context) { + + List actionsList = new ArrayList<>(); + for (Action a : super.getActions(true)) { + actionsList.add(a); + } + actionsList.addAll(ExplorerNodeActionVisitor.getActions(content)); + actionsList.add(new FileSearchAction( + Bundle.ImageNode_getActions_openFileSearchByAttr_text())); + actionsList.add(new ViewSummaryInformationAction(content.getId())); + actionsList.add(new RunIngestModulesAction(Collections.singletonList(content))); + actionsList.add(new NewWindowViewAction( + NbBundle.getMessage(this.getClass(), "ImageNode.getActions.viewInNewWin.text"), this)); + if (checkSchemaVersion()) { + actionsList.add(new DeleteDataSourceAction(content.getId())); + } + return actionsList.toArray(new Action[0]); + } + + @Override + @Messages({"ImageNode.createSheet.size.name=Size (Bytes)", + "ImageNode.createSheet.size.displayName=Size (Bytes)", + "ImageNode.createSheet.size.desc=Size of the data source in bytes.", + "ImageNode.createSheet.type.name=Type", + "ImageNode.createSheet.type.displayName=Type", + "ImageNode.createSheet.type.desc=Type of the image.", + "ImageNode.createSheet.type.text=Image", + "ImageNode.createSheet.sectorSize.name=Sector Size (Bytes)", + "ImageNode.createSheet.sectorSize.displayName=Sector Size (Bytes)", + "ImageNode.createSheet.sectorSize.desc=Sector 
size of the image in bytes.", + "ImageNode.createSheet.timezone.name=Timezone", + "ImageNode.createSheet.timezone.displayName=Timezone", + "ImageNode.createSheet.timezone.desc=Timezone of the image", + "ImageNode.createSheet.deviceId.name=Device ID", + "ImageNode.createSheet.deviceId.displayName=Device ID", + "ImageNode.createSheet.deviceId.desc=Device ID of the image"}) + protected Sheet createSheet() { + Sheet sheet = super.createSheet(); + Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES); + if (sheetSet == null) { + sheetSet = Sheet.createPropertiesSet(); + sheet.put(sheetSet); + } + + sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ImageNode.createSheet.name.name"), + NbBundle.getMessage(this.getClass(), "ImageNode.createSheet.name.displayName"), + NbBundle.getMessage(this.getClass(), "ImageNode.createSheet.name.desc"), + getDisplayName())); + + sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_type_name(), + Bundle.ImageNode_createSheet_type_displayName(), + Bundle.ImageNode_createSheet_type_desc(), + Bundle.ImageNode_createSheet_type_text())); + + sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_size_name(), + Bundle.ImageNode_createSheet_size_displayName(), + Bundle.ImageNode_createSheet_size_desc(), + this.content.getSize())); + sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_sectorSize_name(), + Bundle.ImageNode_createSheet_sectorSize_displayName(), + Bundle.ImageNode_createSheet_sectorSize_desc(), + this.content.getSsize())); + + sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_timezone_name(), + Bundle.ImageNode_createSheet_timezone_displayName(), + Bundle.ImageNode_createSheet_timezone_desc(), + this.content.getTimeZone())); + + try (CaseDbQuery query = Case.getCurrentCaseThrows().getSleuthkitCase().executeQuery("SELECT device_id FROM data_source_info WHERE obj_id = " + this.content.getId());) { + ResultSet deviceIdSet = query.getResultSet(); + if (deviceIdSet.next()) { + 
sheetSet.put(new NodeProperty<>(Bundle.ImageNode_createSheet_deviceId_name(), + Bundle.ImageNode_createSheet_deviceId_displayName(), + Bundle.ImageNode_createSheet_deviceId_desc(), + deviceIdSet.getString("device_id"))); + } + } catch (SQLException | TskCoreException | NoCurrentCaseException ex) { + logger.log(Level.SEVERE, "Failed to get device id for the following image: " + this.content.getId(), ex); + } + + return sheet; + } + + @Override + public T accept(ContentNodeVisitor visitor) { + return visitor.visit(this); + } + + @Override + public boolean isLeafTypeNode() { + return false; + } + + @Override + public T accept(DisplayableItemNodeVisitor visitor) { + return visitor.visit(this); + } + + @Override + public String getItemType() { + return getClass().getName(); + } + + private Boolean checkSchemaVersion() { + try { + CaseDbSchemaVersionNumber creationVersion = Case.getCurrentCaseThrows().getSleuthkitCase().getDBSchemaCreationVersion(); + + if ((creationVersion.getMajor() == 8 && creationVersion.getMinor() >= 3) || creationVersion.getMajor() > 8) { + return true; + } + } catch (NoCurrentCaseException ex) { + logger.log(Level.WARNING, "Failed to get creation schema version: ", ex); + } + + return false; + + } + + /* + * This property change listener refreshes the tree when a new file is + * carved out of this image (i.e, the image is being treated as raw bytes + * and was ingested by the RawDSProcessor). 
+ */ + private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { + String eventType = evt.getPropertyName(); + + // See if the new file is a child of ours + if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) { + if ((evt.getOldValue() instanceof ModuleContentEvent) == false) { + return; + } + ModuleContentEvent moduleContentEvent = (ModuleContentEvent) evt.getOldValue(); + if ((moduleContentEvent.getSource() instanceof Content) == false) { + return; + } + Content newContent = (Content) moduleContentEvent.getSource(); + + try { + Content parent = newContent.getParent(); + if (parent != null) { + // Is this a new carved file? + if (parent.getName().equals(VirtualDirectory.NAME_CARVED)) { + // Is this new carved file for this data source? + if (newContent.getDataSource().getId() == getContent().getDataSource().getId()) { + // Find the image (if any) associated with the new content and + // trigger a refresh if it matches the image wrapped by this node. + while ((parent = parent.getParent()) != null) { + if (parent.getId() == getContent().getId()) { + BaseChildFactory.post(getName(), new BaseChildFactory.RefreshKeysEvent()); + break; + } + } + } + } + } + } catch (TskCoreException ex) { + // Do nothing. + } catch (NoSuchEventBusException ex) { + logger.log(Level.WARNING, "Failed to post key refresh event.", ex); // NON-NLS + } + } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) { + if (evt.getNewValue() == null) { + // case was closed. Remove listeners so that we don't get called with a stale case handle + removeListeners(); + } + } + }; + + /** + * Reads and returns a list of all tags associated with this content node. + * + * Null implementation of an abstract method. + * + * @return list of tags associated with the node. 
+ */ + @Override + protected List getAllTagsFromDatabase() { + return new ArrayList<>(); + } + + /** + * Returns correlation attribute instance for the underlying content of the + * node. + * + * Null implementation of an abstract method. + * + * @return correlation attribute instance for the underlying content of the + * node. + */ + @Override + protected CorrelationAttributeInstance getCorrelationAttributeInstance() { + return null; + } + + /** + * Returns Score property for the node. + * + * Null implementation of an abstract method. + * + * @param tags list of tags. + * + * @return Score property for the underlying content of the node. + */ + @Override + protected Pair getScorePropertyAndDescription(List tags) { + return Pair.of(DataResultViewerTable.Score.NO_SCORE, NO_DESCR); + } + + /** + * Returns comment property for the node. + * + * Null implementation of an abstract method. + * + * @param tags list of tags + * @param attribute correlation attribute instance + * + * @return Comment property for the underlying content of the node. + */ + @Override + protected DataResultViewerTable.HasCommentStatus getCommentProperty(List tags, CorrelationAttributeInstance attribute) { + return DataResultViewerTable.HasCommentStatus.NO_COMMENT; + } + + /** + * Returns occurrences/count property for the node. + * + * Null implementation of an abstract method. + * + * @param attributeType the type of the attribute to count + * @param attributeValue the value of the attribute to coun + * @param defaultDescription a description to use when none is determined by + * the getCountPropertyAndDescription method + * + * @return count property for the underlying content of the node. 
+ */ + @Override + protected Pair getCountPropertyAndDescription(CorrelationAttributeInstance.Type attributeType, String attributeValue, String defaultDescription) { + return Pair.of(-1L, NO_DESCR); + } +} diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java index 4155708a28..ce72c2cf69 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java @@ -1,364 +1,364 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011-2018 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.keywordsearch; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.logging.Level; -import javax.swing.SwingWorker; -import org.apache.commons.lang.StringUtils; -import org.netbeans.api.progress.ProgressHandle; -import org.netbeans.api.progress.aggregate.ProgressContributor; -import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.coreutils.EscapeUtil; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestServices;; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Blackboard; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Stores and processes the results of a keyword search query. Processing - * includes posting keyword hit artifacts to the blackboard, sending messages - * about the search hits to the ingest inbox, and publishing an event to notify - * subscribers of the blackboard posts. - */ -class QueryResults { - - private static final Logger logger = Logger.getLogger(QueryResults.class.getName()); - private static final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName(); - private final KeywordSearchQuery query; - private final Map> results = new HashMap<>(); - - /** - * Constructs a object that stores and processes the results of a keyword - * search query. 
Processing includes adding keyword hit artifacts to the - * blackboard, sending messages about the search hits to the ingest inbox, - * and publishing an event to notify subscribers of the blackboard posts. - * - * The KeywordSearchQuery is used to do the blackboard posts. - * - * @param query The query. - */ - QueryResults(KeywordSearchQuery query) { - this.query = query; - } - - /** - * Gets the keyword search query that generated the results stored in this - * object. - * - * @return The query. - */ - KeywordSearchQuery getQuery() { - return query; - } - - /** - * Adds the keyword hits for a keyword to the hits that are stored in this - * object. All calls to this method MUST be completed before calling the - * process method. - * - * @param keyword The keyword, - * @param hits The hits. - */ - void addResult(Keyword keyword, List hits) { - results.put(keyword, hits); - } - - /** - * Gets the keyword hits stored in this object for a given keyword. - * - * @param keyword The keyword. - * - * @return The keyword hits. - */ - List getResults(Keyword keyword) { - return results.get(keyword); - } - - /** - * Gets the set of unique keywords for which keyword hits have been stored - * in this object. - * - * @return - */ - Set getKeywords() { - return results.keySet(); - } - - /** - * Processes the keyword hits stored in this object by adding keyword hit - * artifacts to the blackboard, sending messages about the search hits to - * the ingest inbox, and publishing an event to notify subscribers of the - * blackboard posts. - * - * Makes one artifact per keyword per searched text source object (file or - * artifact), i.e., if a keyword is found several times in the text - * extracted from the source object, only one artifact is created. - * - * This method ASSUMES that the processing is being done using a SwingWorker - * that should be checked for task cancellation. - * - * All calls to the addResult method MUST be completed before calling this - * method. 
- * - * @param progress A progress indicator that reports the number of - * keywords processed. Can be null. - * @param subProgress A progress contributor that reports the keyword - * currently being processed. Can be null. - * @param worker The SwingWorker that is being used to do the - * processing, will be checked for task cancellation - * before processing each keyword. - * @param notifyInbox Whether or not to write a message to the ingest - * messages inbox if there is a keyword hit in the text - * exrtacted from the text source object. - * @param saveResults Flag whether to save search results as KWS artifacts. - * - */ - void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker worker, boolean notifyInbox, boolean saveResults) { - /* - * Initialize the progress indicator to the number of keywords that will - * be processed. - */ - if (null != progress) { - progress.start(getKeywords().size()); - } - - /* - * Process the keyword hits for each keyword. - */ - int keywordsProcessed = 0; - final Collection hitArtifacts = new ArrayList<>(); - for (final Keyword keyword : getKeywords()) { - /* - * Cancellation check. - */ - if (worker.isCancelled()) { - logger.log(Level.INFO, "Processing cancelled, exiting before processing search term {0}", keyword.getSearchTerm()); //NON-NLS - break; - } - - /* - * Update the progress indicator and the show the current keyword - * via the progress contributor. 
- */ - if (progress != null) { - progress.progress(keyword.toString(), keywordsProcessed); - } - if (subProgress != null) { - String hitDisplayStr = keyword.getSearchTerm(); - if (hitDisplayStr.length() > 50) { - hitDisplayStr = hitDisplayStr.substring(0, 49) + "..."; - } - subProgress.progress(query.getKeywordList().getName() + ": " + hitDisplayStr, keywordsProcessed); - } - - /* - * Reduce the hits for this keyword to one hit per text source - * object so that only one hit artifact is generated per text source - * object, no matter how many times the keyword was actually found. - */ - for (KeywordHit hit : getOneHitPerTextSourceObject(keyword)) { - /* - * Get a snippet (preview) for the hit. Regex queries always - * have snippets made from the content_str pulled back from Solr - * for executing the search. Other types of queries may or may - * not have snippets yet. - */ - String snippet = hit.getSnippet(); - if (StringUtils.isBlank(snippet)) { - final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(keyword.getSearchTerm()); - try { - snippet = LuceneQuery.querySnippet(snippetQuery, hit.getSolrObjectId(), hit.getChunkId(), !query.isLiteral(), true); - } catch (NoOpenCoreException e) { - logger.log(Level.SEVERE, "Solr core closed while executing snippet query " + snippetQuery, e); //NON-NLS - break; // Stop processing. - } catch (Exception e) { - logger.log(Level.SEVERE, "Error executing snippet query " + snippetQuery, e); //NON-NLS - continue; // Try processing the next hit. - } - } - - /* - * Get the content (file or artifact) that is the text source - * for the hit. 
- */ - Content content = null; - try { - SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); - content = tskCase.getContentById(hit.getContentID()); - } catch (TskCoreException | NoCurrentCaseException tskCoreException) { - logger.log(Level.SEVERE, "Failed to get text source object for keyword hit", tskCoreException); //NON-NLS - } - - if ((content != null) && saveResults) { - /* - * Post an artifact for the hit to the blackboard. - */ - BlackboardArtifact artifact = query.createKeywordHitArtifact(content, keyword, hit, snippet, query.getKeywordList().getName()); - - /* - * Send an ingest inbox message for the hit. - */ - if (null != artifact) { - hitArtifacts.add(artifact); - if (notifyInbox) { - try { - writeSingleFileInboxMessage(artifact, content); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error sending message to ingest messages inbox", ex); //NON-NLS - } - } - } - } - } - - ++keywordsProcessed; - } - - /* - * Post the artifacts to the blackboard which will publish an event to - * notify subscribers of the new artifacts. - */ - if (!hitArtifacts.isEmpty()) { - try { - SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); - Blackboard blackboard = tskCase.getBlackboard(); - - blackboard.postArtifacts(hitArtifacts, MODULE_NAME); - } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { - logger.log(Level.SEVERE, "Failed to post KWH artifact to blackboard.", ex); //NON-NLS - } - } - } - - /** - * Reduce the hits for a given keyword to one hit per text source object so - * that only one hit artifact is generated per text source object, no matter - * how many times the keyword was actually found. - * - * @param keyword The keyword. - * - * @return Collection The reduced set of keyword hits. 
- */ - private Collection getOneHitPerTextSourceObject(Keyword keyword) { - /* - * For each Solr document (chunk) for a text source object, return only - * a single keyword hit from the first chunk of text (the one with the - * lowest chunk id). - */ - HashMap< Long, KeywordHit> hits = new HashMap<>(); - getResults(keyword).forEach((hit) -> { - if (!hits.containsKey(hit.getSolrObjectId())) { - hits.put(hit.getSolrObjectId(), hit); - } else if (hit.getChunkId() < hits.get(hit.getSolrObjectId()).getChunkId()) { - hits.put(hit.getSolrObjectId(), hit); - } - }); - return hits.values(); - } - - /** - * Send an ingest inbox message indicating that there was a keyword hit in - * the given text source object. - * - * @param artifact The keyword hit artifact for the hit. - * @param hitContent The text source object. - * - * @throws TskCoreException If there is a problem generating or send the - * inbox message. - */ - private void writeSingleFileInboxMessage(BlackboardArtifact artifact, Content hitContent) throws TskCoreException { - StringBuilder subjectSb = new StringBuilder(1024); - if (!query.isLiteral()) { - subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl")); - } else { - subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl")); - } - - StringBuilder detailsSb = new StringBuilder(1024); - String uniqueKey = null; - BlackboardAttribute attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD)); - if (attr != null) { - final String keyword = attr.getValueString(); - subjectSb.append(keyword); - uniqueKey = keyword.toLowerCase(); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl")); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - } - - //preview - attr = artifact.getAttribute(new 
BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW)); - if (attr != null) { - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl")); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - } - - //file - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl")); - if (hitContent instanceof AbstractFile) { - AbstractFile hitFile = (AbstractFile) hitContent; - detailsSb.append(""); //NON-NLS - } else { - detailsSb.append(""); //NON-NLS - } - detailsSb.append(""); //NON-NLS - - //list - attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME)); - if (attr != null) { - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl")); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - } - - //regex - if (!query.isLiteral()) { - attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP)); - if (attr != null) { - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl")); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - } - } - detailsSb.append("
").append(EscapeUtil.escapeHtml(keyword)).append("
").append(EscapeUtil.escapeHtml(attr.getValueString())).append("
").append(hitFile.getParentPath()).append(hitFile.getName()).append("").append(hitContent.getName()).append("
").append(attr.getValueString()).append("
").append(attr.getValueString()).append("
"); //NON-NLS - - IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), uniqueKey, artifact)); - } -} +/* + * Autopsy Forensic Browser + * + * Copyright 2011-2018 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.keywordsearch; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.logging.Level; +import javax.swing.SwingWorker; +import org.apache.commons.lang.StringUtils; +import org.netbeans.api.progress.ProgressHandle; +import org.netbeans.api.progress.aggregate.ProgressContributor; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.coreutils.EscapeUtil; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestServices;; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Blackboard; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * 
Stores and processes the results of a keyword search query. Processing + * includes posting keyword hit artifacts to the blackboard, sending messages + * about the search hits to the ingest inbox, and publishing an event to notify + * subscribers of the blackboard posts. + */ +class QueryResults { + + private static final Logger logger = Logger.getLogger(QueryResults.class.getName()); + private static final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName(); + private final KeywordSearchQuery query; + private final Map> results = new HashMap<>(); + + /** + * Constructs a object that stores and processes the results of a keyword + * search query. Processing includes adding keyword hit artifacts to the + * blackboard, sending messages about the search hits to the ingest inbox, + * and publishing an event to notify subscribers of the blackboard posts. + * + * The KeywordSearchQuery is used to do the blackboard posts. + * + * @param query The query. + */ + QueryResults(KeywordSearchQuery query) { + this.query = query; + } + + /** + * Gets the keyword search query that generated the results stored in this + * object. + * + * @return The query. + */ + KeywordSearchQuery getQuery() { + return query; + } + + /** + * Adds the keyword hits for a keyword to the hits that are stored in this + * object. All calls to this method MUST be completed before calling the + * process method. + * + * @param keyword The keyword, + * @param hits The hits. + */ + void addResult(Keyword keyword, List hits) { + results.put(keyword, hits); + } + + /** + * Gets the keyword hits stored in this object for a given keyword. + * + * @param keyword The keyword. + * + * @return The keyword hits. + */ + List getResults(Keyword keyword) { + return results.get(keyword); + } + + /** + * Gets the set of unique keywords for which keyword hits have been stored + * in this object. 
+ * + * @return + */ + Set getKeywords() { + return results.keySet(); + } + + /** + * Processes the keyword hits stored in this object by adding keyword hit + * artifacts to the blackboard, sending messages about the search hits to + * the ingest inbox, and publishing an event to notify subscribers of the + * blackboard posts. + * + * Makes one artifact per keyword per searched text source object (file or + * artifact), i.e., if a keyword is found several times in the text + * extracted from the source object, only one artifact is created. + * + * This method ASSUMES that the processing is being done using a SwingWorker + * that should be checked for task cancellation. + * + * All calls to the addResult method MUST be completed before calling this + * method. + * + * @param progress A progress indicator that reports the number of + * keywords processed. Can be null. + * @param subProgress A progress contributor that reports the keyword + * currently being processed. Can be null. + * @param worker The SwingWorker that is being used to do the + * processing, will be checked for task cancellation + * before processing each keyword. + * @param notifyInbox Whether or not to write a message to the ingest + * messages inbox if there is a keyword hit in the text + * exrtacted from the text source object. + * @param saveResults Flag whether to save search results as KWS artifacts. + * + */ + void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker worker, boolean notifyInbox, boolean saveResults) { + /* + * Initialize the progress indicator to the number of keywords that will + * be processed. + */ + if (null != progress) { + progress.start(getKeywords().size()); + } + + /* + * Process the keyword hits for each keyword. + */ + int keywordsProcessed = 0; + final Collection hitArtifacts = new ArrayList<>(); + for (final Keyword keyword : getKeywords()) { + /* + * Cancellation check. 
+ */ + if (worker.isCancelled()) { + logger.log(Level.INFO, "Processing cancelled, exiting before processing search term {0}", keyword.getSearchTerm()); //NON-NLS + break; + } + + /* + * Update the progress indicator and the show the current keyword + * via the progress contributor. + */ + if (progress != null) { + progress.progress(keyword.toString(), keywordsProcessed); + } + if (subProgress != null) { + String hitDisplayStr = keyword.getSearchTerm(); + if (hitDisplayStr.length() > 50) { + hitDisplayStr = hitDisplayStr.substring(0, 49) + "..."; + } + subProgress.progress(query.getKeywordList().getName() + ": " + hitDisplayStr, keywordsProcessed); + } + + /* + * Reduce the hits for this keyword to one hit per text source + * object so that only one hit artifact is generated per text source + * object, no matter how many times the keyword was actually found. + */ + for (KeywordHit hit : getOneHitPerTextSourceObject(keyword)) { + /* + * Get a snippet (preview) for the hit. Regex queries always + * have snippets made from the content_str pulled back from Solr + * for executing the search. Other types of queries may or may + * not have snippets yet. + */ + String snippet = hit.getSnippet(); + if (StringUtils.isBlank(snippet)) { + final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(keyword.getSearchTerm()); + try { + snippet = LuceneQuery.querySnippet(snippetQuery, hit.getSolrObjectId(), hit.getChunkId(), !query.isLiteral(), true); + } catch (NoOpenCoreException e) { + logger.log(Level.SEVERE, "Solr core closed while executing snippet query " + snippetQuery, e); //NON-NLS + break; // Stop processing. + } catch (Exception e) { + logger.log(Level.SEVERE, "Error executing snippet query " + snippetQuery, e); //NON-NLS + continue; // Try processing the next hit. + } + } + + /* + * Get the content (file or artifact) that is the text source + * for the hit. 
+ */ + Content content = null; + try { + SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); + content = tskCase.getContentById(hit.getContentID()); + } catch (TskCoreException | NoCurrentCaseException tskCoreException) { + logger.log(Level.SEVERE, "Failed to get text source object for keyword hit", tskCoreException); //NON-NLS + } + + if ((content != null) && saveResults) { + /* + * Post an artifact for the hit to the blackboard. + */ + BlackboardArtifact artifact = query.createKeywordHitArtifact(content, keyword, hit, snippet, query.getKeywordList().getName()); + + /* + * Send an ingest inbox message for the hit. + */ + if (null != artifact) { + hitArtifacts.add(artifact); + if (notifyInbox) { + try { + writeSingleFileInboxMessage(artifact, content); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error sending message to ingest messages inbox", ex); //NON-NLS + } + } + } + } + } + + ++keywordsProcessed; + } + + /* + * Post the artifacts to the blackboard which will publish an event to + * notify subscribers of the new artifacts. + */ + if (!hitArtifacts.isEmpty()) { + try { + SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); + Blackboard blackboard = tskCase.getBlackboard(); + + blackboard.postArtifacts(hitArtifacts, MODULE_NAME); + } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { + logger.log(Level.SEVERE, "Failed to post KWH artifact to blackboard.", ex); //NON-NLS + } + } + } + + /** + * Reduce the hits for a given keyword to one hit per text source object so + * that only one hit artifact is generated per text source object, no matter + * how many times the keyword was actually found. + * + * @param keyword The keyword. + * + * @return Collection The reduced set of keyword hits. 
+ */ + private Collection getOneHitPerTextSourceObject(Keyword keyword) { + /* + * For each Solr document (chunk) for a text source object, return only + * a single keyword hit from the first chunk of text (the one with the + * lowest chunk id). + */ + HashMap< Long, KeywordHit> hits = new HashMap<>(); + getResults(keyword).forEach((hit) -> { + if (!hits.containsKey(hit.getSolrObjectId())) { + hits.put(hit.getSolrObjectId(), hit); + } else if (hit.getChunkId() < hits.get(hit.getSolrObjectId()).getChunkId()) { + hits.put(hit.getSolrObjectId(), hit); + } + }); + return hits.values(); + } + + /** + * Send an ingest inbox message indicating that there was a keyword hit in + * the given text source object. + * + * @param artifact The keyword hit artifact for the hit. + * @param hitContent The text source object. + * + * @throws TskCoreException If there is a problem generating or send the + * inbox message. + */ + private void writeSingleFileInboxMessage(BlackboardArtifact artifact, Content hitContent) throws TskCoreException { + StringBuilder subjectSb = new StringBuilder(1024); + if (!query.isLiteral()) { + subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl")); + } else { + subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl")); + } + + StringBuilder detailsSb = new StringBuilder(1024); + String uniqueKey = null; + BlackboardAttribute attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD)); + if (attr != null) { + final String keyword = attr.getValueString(); + subjectSb.append(keyword); + uniqueKey = keyword.toLowerCase(); + detailsSb.append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl")); + detailsSb.append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + } + + //preview + attr = artifact.getAttribute(new 
BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW)); + if (attr != null) { + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl")); + detailsSb.append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + } + + //file + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl")); + if (hitContent instanceof AbstractFile) { + AbstractFile hitFile = (AbstractFile) hitContent; + detailsSb.append(""); //NON-NLS + } else { + detailsSb.append(""); //NON-NLS + } + detailsSb.append(""); //NON-NLS + + //list + attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME)); + if (attr != null) { + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl")); + detailsSb.append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + } + + //regex + if (!query.isLiteral()) { + attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP)); + if (attr != null) { + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl")); + detailsSb.append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + } + } + detailsSb.append("
").append(EscapeUtil.escapeHtml(keyword)).append("
").append(EscapeUtil.escapeHtml(attr.getValueString())).append("
").append(hitFile.getParentPath()).append(hitFile.getName()).append("").append(hitContent.getName()).append("
").append(attr.getValueString()).append("
").append(attr.getValueString()).append("
"); //NON-NLS + + IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), uniqueKey, artifact)); + } +} From d9f4e47b9b4fc2a26dad2482aeb5d4352fb09418 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Wed, 9 Oct 2019 17:20:11 -0400 Subject: [PATCH 10/16] Update KeywordSearchService.java Changed comment in code to reflect reviewers comment --- .../autopsy/keywordsearchservice/KeywordSearchService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java index fcd0d7f2f8..3ef8822e89 100644 --- a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java +++ b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java @@ -100,7 +100,7 @@ public interface KeywordSearchService extends Closeable { /** * Deletes the keyword search text for a specific data source. * - * @param dataSourceId The data source id to be deleted from Solr. + * @param dataSourceId The data source id to be deleted. * * @throws KeywordSearchServiceException if unable to delete. 
*/ From d128e7a902b499b282d3d6820bc618ae05f4216f Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Wed, 9 Oct 2019 17:20:46 -0400 Subject: [PATCH 11/16] Update DeleteDataSourceAction.java Changed selectedDataSource to dataSourceId --- .../autopsy/actions/DeleteDataSourceAction.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java index 697e37480b..ae57ac3088 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java @@ -35,20 +35,20 @@ import org.sleuthkit.datamodel.TskCoreException; */ public final class DeleteDataSourceAction extends AbstractAction { private static final Logger logger = Logger.getLogger(DeleteDataSourceAction.class.getName()); - private final Long selectedDataSource; + private final Long dataSourceId; @NbBundle.Messages({"DeleteDataSourceAction.name.text=Delete Data Source"}) - public DeleteDataSourceAction(Long selectedDataSource) { + public DeleteDataSourceAction(Long dataSourceId) { super(Bundle.DeleteDataSourceAction_name_text()); - this.selectedDataSource = selectedDataSource; + this.dataSourceId = dataSourceId; } @Override public void actionPerformed(ActionEvent event) { try { - Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(selectedDataSource); + Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(dataSourceId); } catch (NoCurrentCaseException | TskCoreException e) { - logger.log(Level.WARNING, "Error Deleting Data source " + selectedDataSource, e); + logger.log(Level.WARNING, "Error Deleting Data source " + dataSourceId, e); } } private static void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { From 86c45f3e49c994973168eb4920f14cbb58c4d8b6 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Thu, 10 Oct 2019 10:03:36 -0400 
Subject: [PATCH 12/16] changes based on reviewer comments addressed comments from reviewer. --- .../actions/DeleteDataSourceAction.java | 3 +- .../keywordsearch/Bundle.properties-MERGED | 1 + .../autopsy/keywordsearch/Server.java | 131 +++++++++--------- .../keywordsearch/SolrSearchService.java | 4 +- 4 files changed, 73 insertions(+), 66 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java index ae57ac3088..1292e079d4 100644 --- a/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java +++ b/Core/src/org/sleuthkit/autopsy/actions/DeleteDataSourceAction.java @@ -47,7 +47,8 @@ public final class DeleteDataSourceAction extends AbstractAction { public void actionPerformed(ActionEvent event) { try { Case.getCurrentCaseThrows().getSleuthkitCase().deleteDataSource(dataSourceId); - } catch (NoCurrentCaseException | TskCoreException e) { + deleteDataSource(dataSourceId); + } catch (NoCurrentCaseException | TskCoreException | KeywordSearchServiceException e) { logger.log(Level.WARNING, "Error Deleting Data source " + dataSourceId, e); } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index 7edcb002b2..0c5b0404e8 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -243,6 +243,7 @@ Server.request.exception.exception.msg=Could not issue Solr request Server.commit.exception.msg=Could not commit index Server.addDoc.exception.msg=Could not add document to index via update handler: {0} Server.addDoc.exception.msg2=Could not add document to index via update handler: {0} +Server.delDoc.exception.msg=Error deleting content from Solr. 
Solr image id : {0} Server.close.exception.msg=Cannot close Core Server.close.exception.msg2=Cannot close Core Server.solrServerNoPortException.msg=Indexing server could not bind to port {0}, port is not available, consider change the default {1} port. diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index e83acb6b47..d24ebe936b 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -190,16 +190,16 @@ public class Server { } }, /** - * termfreq is a function which returns the number of times the term appears. - * This is not an actual field defined in schema.xml, but can be gotten from returned documents - * in the same way as fields. + * termfreq is a function which returns the number of times the term + * appears. This is not an actual field defined in schema.xml, but can + * be gotten from returned documents in the same way as fields. */ TERMFREQ { @Override public String toString() { return "termfreq"; //NON-NLS } - } + } }; public static final String HL_ANALYZE_CHARS_UNLIMITED = "500000"; //max 1MB in a chunk. use -1 for unlimited, but -1 option may not be supported (not documented) @@ -526,7 +526,7 @@ public class Server { @Override public void run() { MessageNotifyUtil.Notify.error( - NbBundle.getMessage(this.getClass(), "Installer.errorInitKsmMsg"), + NbBundle.getMessage(this.getClass(), "Installer.errorInitKsmMsg"), Bundle.Server_status_failed_msg()); } }); @@ -891,28 +891,30 @@ public class Server { throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.cantOpen.msg"), ex); } } - + /** - * Get the host and port for a multiuser case. - * If the file solrserver.txt exists, then use the values from that file. - * Otherwise use the settings from the properties file. - * + * Get the host and port for a multiuser case. 
If the file solrserver.txt + * exists, then use the values from that file. Otherwise use the settings + * from the properties file. + * * @param caseDirectory Current case directory - * @return IndexingServerProperties containing the solr host/port for this case + * + * @return IndexingServerProperties containing the solr host/port for this + * case */ public static IndexingServerProperties getMultiUserServerProperties(String caseDirectory) { Path serverFilePath = Paths.get(caseDirectory, "solrserver.txt"); - if(serverFilePath.toFile().exists()){ - try{ + if (serverFilePath.toFile().exists()) { + try { List lines = Files.readAllLines(serverFilePath); - if(lines.isEmpty()) { + if (lines.isEmpty()) { logger.log(Level.SEVERE, "solrserver.txt file does not contain any data"); - } else if (! lines.get(0).contains(",")) { + } else if (!lines.get(0).contains(",")) { logger.log(Level.SEVERE, "solrserver.txt file is corrupt - could not read host/port from " + lines.get(0)); } else { String[] parts = lines.get(0).split(","); - if(parts.length != 2) { + if (parts.length != 2) { logger.log(Level.SEVERE, "solrserver.txt file is corrupt - could not read host/port from " + lines.get(0)); } else { return new IndexingServerProperties(parts[0], parts[1]); @@ -922,102 +924,104 @@ public class Server { logger.log(Level.SEVERE, "solrserver.txt file could not be read", ex); } } - + // Default back to the user preferences if the solrserver.txt file was not found or if an error occurred String host = UserPreferences.getIndexingServerHost(); String port = UserPreferences.getIndexingServerPort(); return new IndexingServerProperties(host, port); } - + /** - * Pick a solr server to use for this case and record it in the case directory. - * Looks for a file named "solrServerList.txt" in the root output directory - - * if this does not exist then no server is recorded. 
- * - * Format of solrServerList.txt: - * (host),(port) - * Ex: 10.1.2.34,8983 - * + * Pick a solr server to use for this case and record it in the case + * directory. Looks for a file named "solrServerList.txt" in the root output + * directory - if this does not exist then no server is recorded. + * + * Format of solrServerList.txt: (host),(port) Ex: 10.1.2.34,8983 + * * @param rootOutputDirectory * @param caseDirectoryPath - * @throws KeywordSearchModuleException + * + * @throws KeywordSearchModuleException */ public static void selectSolrServerForCase(Path rootOutputDirectory, Path caseDirectoryPath) throws KeywordSearchModuleException { // Look for the solr server list file String serverListName = "solrServerList.txt"; Path serverListPath = Paths.get(rootOutputDirectory.toString(), serverListName); - if(serverListPath.toFile().exists()){ - + if (serverListPath.toFile().exists()) { + // Read the list of solr servers List lines; - try{ + try { lines = Files.readAllLines(serverListPath); - } catch (IOException ex){ + } catch (IOException ex) { throw new KeywordSearchModuleException(serverListName + " could not be read", ex); } - + // Remove any lines that don't contain a comma (these are likely just whitespace) for (Iterator iterator = lines.iterator(); iterator.hasNext();) { String line = iterator.next(); - if (! line.contains(",")) { + if (!line.contains(",")) { // Remove the current element from the iterator and the list. 
iterator.remove(); } } - if(lines.isEmpty()) { + if (lines.isEmpty()) { throw new KeywordSearchModuleException(serverListName + " had no valid server information"); } - + // Choose which server to use int rnd = new Random().nextInt(lines.size()); String[] parts = lines.get(rnd).split(","); - if(parts.length != 2) { + if (parts.length != 2) { throw new KeywordSearchModuleException("Invalid server data: " + lines.get(rnd)); } - + // Split it up just to do a sanity check on the data String host = parts[0]; - String port = parts[1]; - if(host.isEmpty() || port.isEmpty()) { + String port = parts[1]; + if (host.isEmpty() || port.isEmpty()) { throw new KeywordSearchModuleException("Invalid server data: " + lines.get(rnd)); } - + // Write the server data to a file Path serverFile = Paths.get(caseDirectoryPath.toString(), "solrserver.txt"); try { caseDirectoryPath.toFile().mkdirs(); - if (! caseDirectoryPath.toFile().exists()) { + if (!caseDirectoryPath.toFile().exists()) { throw new KeywordSearchModuleException("Case directory " + caseDirectoryPath.toString() + " does not exist"); } Files.write(serverFile, lines.get(rnd).getBytes()); - } catch (IOException ex){ + } catch (IOException ex) { throw new KeywordSearchModuleException(serverFile.toString() + " could not be written", ex); } } } - + /** * Helper class to store the current server properties */ public static class IndexingServerProperties { + private final String host; private final String port; - - IndexingServerProperties (String host, String port) { + + IndexingServerProperties(String host, String port) { this.host = host; this.port = port; } /** * Get the host + * * @return host */ public String getHost() { return host; } - + /** * Get the port + * * @return port */ public String getPort() { @@ -1268,12 +1272,12 @@ public class Server { /** * Delete a data source from SOLR. 
- * + * * @param dataSourceId to delete - * + * * @throws NoOpenCoreException */ - public void deleteDataSource(Long dataSourceId) throws NoOpenCoreException { + public void deleteDataSource(Long dataSourceId) throws KeywordSearchModuleException, NoOpenCoreException { currentCoreLock.writeLock().lock(); try { if (null == currentCore) { @@ -1281,13 +1285,14 @@ public class Server { } currentCore.deleteDataSource(dataSourceId); currentCore.commit(); - } catch (SolrServerException ex) { - logger.log(Level.SEVERE, "Solr delete data dource failed for data source: " + dataSourceId, ex); //NON-NLS + } catch (SolrServerException | KeywordSearchModuleException ex) { + throw new KeywordSearchModuleException( + NbBundle.getMessage(this.getClass(), "Server.delDoc.exception.msg", dataSourceId), ex); } finally { currentCoreLock.writeLock().unlock(); - } + } } - + /** * Get the text contents of the given file as stored in SOLR. * @@ -1398,10 +1403,10 @@ public class Server { * @throws IOException */ void connectToSolrServer(HttpSolrServer solrServer) throws SolrServerException, IOException { - TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check"); + TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check"); CoreAdminRequest statusRequest = new CoreAdminRequest(); - statusRequest.setCoreName( null ); - statusRequest.setAction( CoreAdminParams.CoreAdminAction.STATUS ); + statusRequest.setCoreName(null); + statusRequest.setAction(CoreAdminParams.CoreAdminAction.STATUS); statusRequest.setIndexInfoNeeded(false); statusRequest.process(solrServer); HealthMonitor.submitTimingMetric(metric); @@ -1458,7 +1463,7 @@ public class Server { // the server to access a core needs to be built from a URL with the // core in it, and is only good for core-specific operations private final HttpSolrServer solrCore; - + private final int QUERY_TIMEOUT_MILLISECONDS = 86400000; // 24 Hours = 86,400,000 Milliseconds private Core(String name, CaseType caseType, 
Index index) { @@ -1470,7 +1475,7 @@ public class Server { //TODO test these settings // socket read timeout, make large enough so can index larger files - solrCore.setSoTimeout(QUERY_TIMEOUT_MILLISECONDS); + solrCore.setSoTimeout(QUERY_TIMEOUT_MILLISECONDS); //solrCore.setConnectionTimeout(1000); solrCore.setDefaultMaxConnectionsPerHost(32); solrCore.setMaxTotalConnections(32); @@ -1529,17 +1534,18 @@ public class Server { } } - private void deleteDataSource(Long dsObjId) { + private void deleteDataSource(Long dsObjId) throws KeywordSearchModuleException { String dataSourceId = Long.toString(dsObjId); String deleteQuery = "image_id:" + dataSourceId; try { // Get the first result. UpdateResponse updateResponse = solrCore.deleteByQuery(deleteQuery); } catch (SolrServerException | IOException ex) { - logger.log(Level.SEVERE, "Error deleting content from Solr. Solr image id " + dataSourceId, ex); //NON-NLS + throw new KeywordSearchModuleException( + NbBundle.getMessage(this.getClass(), "Server.delDoc.exception.msg", dataSourceId), ex); //NON-NLS } } - + void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException { try { solrCore.add(doc); @@ -1561,7 +1567,8 @@ public class Server { * @param chunkID Chunk ID of the Solr document * * @return Text from matching Solr document (as String). 
Null if no - * matching Solr document found or error while getting content from Solr + * matching Solr document found or error while getting content + * from Solr */ private String getSolrContent(long contentID, int chunkID) { final SolrQuery q = new SolrQuery(); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java index b8af966867..804c8557d7 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java @@ -208,9 +208,7 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService { public void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { try { KeywordSearch.getServer().deleteDataSource(dataSourceId); - } catch (NoOpenCoreException ex) { - logger.log(Level.WARNING, NbBundle.getMessage(SolrSearchService.class, - "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore")); + } catch (NoOpenCoreException | KeywordSearchModuleException ex) { throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore")); } From bfb1bbe12161401cc24b2c472d082bbaead5f916 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Mon, 21 Oct 2019 12:19:04 -0400 Subject: [PATCH 13/16] Changed Exception Handling Changed exception handling --- .../keywordsearch/Bundle.properties-MERGED | 3 +- .../autopsy/keywordsearch/Server.java | 29 ++++++------------- 2 files changed, 10 insertions(+), 22 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index 0c5b0404e8..c44ceb7556 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ 
b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -36,7 +36,7 @@ KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search @@ -243,7 +243,6 @@ Server.request.exception.exception.msg=Could not issue Solr request Server.commit.exception.msg=Could not commit index Server.addDoc.exception.msg=Could not add document to index via update handler: {0} Server.addDoc.exception.msg2=Could not add document to index via update handler: {0} -Server.delDoc.exception.msg=Error deleting content from Solr. 
Solr image id : {0} Server.close.exception.msg=Cannot close Core Server.close.exception.msg2=Cannot close Core Server.solrServerNoPortException.msg=Indexing server could not bind to port {0}, port is not available, consider change the default {1} port. diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index d24ebe936b..b886e5eb71 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -1277,20 +1277,14 @@ public class Server { * * @throws NoOpenCoreException */ - public void deleteDataSource(Long dataSourceId) throws KeywordSearchModuleException, NoOpenCoreException { + private void deleteDataSource(Long dataSourceId) throws SolrServerException, KeywordSearchModuleException, NoOpenCoreException, IOException { currentCoreLock.writeLock().lock(); - try { - if (null == currentCore) { - throw new NoOpenCoreException(); - } - currentCore.deleteDataSource(dataSourceId); - currentCore.commit(); - } catch (SolrServerException | KeywordSearchModuleException ex) { - throw new KeywordSearchModuleException( - NbBundle.getMessage(this.getClass(), "Server.delDoc.exception.msg", dataSourceId), ex); - } finally { - currentCoreLock.writeLock().unlock(); + if (null == currentCore) { + throw new NoOpenCoreException(); } + currentCore.deleteDataSource(dataSourceId); + currentCore.commit(); + currentCoreLock.writeLock().unlock(); } /** @@ -1534,16 +1528,11 @@ public class Server { } } - private void deleteDataSource(Long dsObjId) throws KeywordSearchModuleException { + private void deleteDataSource(Long dsObjId) throws SolrServerException, IOException { String dataSourceId = Long.toString(dsObjId); String deleteQuery = "image_id:" + dataSourceId; - try { - // Get the first result. 
- UpdateResponse updateResponse = solrCore.deleteByQuery(deleteQuery); - } catch (SolrServerException | IOException ex) { - throw new KeywordSearchModuleException( - NbBundle.getMessage(this.getClass(), "Server.delDoc.exception.msg", dataSourceId), ex); //NON-NLS - } + // Get the first result. + UpdateResponse updateResponse = solrCore.deleteByQuery(deleteQuery); } void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException { From e9229a20fc91eaecfd57536b27fc1e9989e2e61c Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Mon, 21 Oct 2019 13:19:58 -0400 Subject: [PATCH 14/16] Update for Exception handling Update for exception handling. --- .../src/org/sleuthkit/autopsy/keywordsearch/Server.java | 2 +- .../org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index b886e5eb71..dae1bae943 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -1277,7 +1277,7 @@ public class Server { * * @throws NoOpenCoreException */ - private void deleteDataSource(Long dataSourceId) throws SolrServerException, KeywordSearchModuleException, NoOpenCoreException, IOException { + public void deleteDataSource(Long dataSourceId) throws SolrServerException, KeywordSearchModuleException, NoOpenCoreException, IOException { currentCoreLock.writeLock().lock(); if (null == currentCore) { throw new NoOpenCoreException(); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java index 804c8557d7..13eb75907f 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java +++ 
b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java @@ -208,7 +208,7 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService { public void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { try { KeywordSearch.getServer().deleteDataSource(dataSourceId); - } catch (NoOpenCoreException | KeywordSearchModuleException ex) { + } catch (NoOpenCoreException | KeywordSearchModuleException | SolrServerException | IOException ex) { throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore")); } From f461194d920a53570184716cb7abd8b42814fa17 Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Tue, 22 Oct 2019 14:17:06 -0400 Subject: [PATCH 15/16] Update for exception handling Update for exception handling --- .../KeywordSearchService.java | 12 ++++----- .../autopsy/keywordsearch/Bundle.properties | 1 + .../keywordsearch/Bundle.properties-MERGED | 3 ++- .../autopsy/keywordsearch/Server.java | 23 +++++++++-------- .../keywordsearch/SolrSearchService.java | 25 +++++++++---------- 5 files changed, 34 insertions(+), 30 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java index 3ef8822e89..981abe05de 100644 --- a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java +++ b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java @@ -28,11 +28,11 @@ import org.sleuthkit.datamodel.TskCoreException; /** * An interface for implementations of a keyword search service. 
You can find * the implementations by using Lookup, such as: - * + * * Lookup.getDefault().lookup(KeywordSearchService.class) - * + * * although most clients should obtain a keyword search service by calling: - * + * * Case.getCurrentCase().getServices().getKeywordSearchService() * * TODO (AUT-2158): This interface should not extend Closeable. @@ -82,7 +82,7 @@ public interface KeywordSearchService extends Closeable { * @throws KeywordSearchServiceException if unable to delete. */ public void deleteTextIndex(CaseMetadata metadata) throws KeywordSearchServiceException; - + /** * Closes the keyword search service. * @@ -95,7 +95,7 @@ public interface KeywordSearchService extends Closeable { * No-op maintained for backwards compatibility. Clients should not * attempt to close case services. */ - } + } /** * Deletes the keyword search text for a specific data source. @@ -104,6 +104,6 @@ public interface KeywordSearchService extends Closeable { * * @throws KeywordSearchServiceException if unable to delete. */ - public void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException; + void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException; } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties index df3db1bf73..06e35314ce 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties @@ -298,6 +298,7 @@ GlobalEditListPanel.editWordButton.text=Edit Keyword SolrSearchService.ServiceName=Solr Keyword Search Service SolrSearchService.IndexReadOnlyDialog.title=Text Index Is Read-Only SolrSearchService.IndexReadOnlyDialog.msg=The text index for this case is read-only.
You will be able to see existing keyword search results and perform exact match and substring match keyword searches,
but you will not be able to add new text to the index or perform regex searches. You may instead open the case
with your previous version of this application. +SolrSearchService.DeleteDataSource.msg=Error Deleting Solr data for data source id {0} ExtractedContentPanel.jLabel1.text=Text Source: ExtractedContentPanel.pagePreviousButton.actionCommand=pagePreviousButton ExtractedContentPanel.pagePreviousButton.text= diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED index c44ceb7556..d44a070c9b 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties-MERGED @@ -36,7 +36,7 @@ KeywordSearchResultFactory.createNodeForKey.noResultsFound.text=No results found KeywordSearchResultFactory.query.exception.msg=Could not perform the query OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\n\The module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. +OpenIDE-Module-Long-Description=Keyword Search ingest module.\n\nThe module indexes files found in the disk image at ingest time.\nIt then periodically runs the search on the indexed files using one or more keyword lists (containing pure words and/or regular expressions) and posts results.\n\nThe module also contains additional tools integrated in the main GUI, such as keyword list configuration, keyword search bar in the top-right corner, extracted text viewer and search results viewer showing highlighted keywords found. 
OpenIDE-Module-Name=KeywordSearch OptionsCategory_Name_KeywordSearchOptions=Keyword Search OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search @@ -358,6 +358,7 @@ SolrSearchService.indexingError=Unable to index blackboard artifact. SolrSearchService.ServiceName=Solr Keyword Search Service SolrSearchService.IndexReadOnlyDialog.title=Text Index Is Read-Only SolrSearchService.IndexReadOnlyDialog.msg=The text index for this case is read-only.
You will be able to see existing keyword search results and perform exact match and substring match keyword searches,
but you will not be able to add new text to the index or perform regex searches. You may instead open the case
with your previous version of this application. +SolrSearchService.DeleteDataSource.msg=Error Deleting Solr data for data source id {0} ExtractedContentPanel.jLabel1.text=Text Source: ExtractedContentPanel.pagePreviousButton.actionCommand=pagePreviousButton ExtractedContentPanel.pagePreviousButton.text= diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index dae1bae943..b73d274c84 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -1277,14 +1277,17 @@ public class Server { * * @throws NoOpenCoreException */ - public void deleteDataSource(Long dataSourceId) throws SolrServerException, KeywordSearchModuleException, NoOpenCoreException, IOException { - currentCoreLock.writeLock().lock(); - if (null == currentCore) { - throw new NoOpenCoreException(); + void deleteDataSource(Long dataSourceId) throws IOException, KeywordSearchModuleException, NoOpenCoreException, SolrServerException { + try { + currentCoreLock.writeLock().lock(); + if (null == currentCore) { + throw new NoOpenCoreException(); + } + currentCore.deleteDataSource(dataSourceId); + currentCore.commit(); + } finally { + currentCoreLock.writeLock().unlock(); } - currentCore.deleteDataSource(dataSourceId); - currentCore.commit(); - currentCoreLock.writeLock().unlock(); } /** @@ -1528,11 +1531,11 @@ public class Server { } } - private void deleteDataSource(Long dsObjId) throws SolrServerException, IOException { + private void deleteDataSource(Long dsObjId) throws IOException, SolrServerException { String dataSourceId = Long.toString(dsObjId); String deleteQuery = "image_id:" + dataSourceId; - // Get the first result. 
- UpdateResponse updateResponse = solrCore.deleteByQuery(deleteQuery); + + solrCore.deleteByQuery(deleteQuery); } void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java index 13eb75907f..e2906f89d6 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SolrSearchService.java @@ -60,8 +60,7 @@ import org.sleuthkit.datamodel.TskCoreException; * text indexing and search. */ @ServiceProviders(value = { - @ServiceProvider(service = KeywordSearchService.class) - , + @ServiceProvider(service = KeywordSearchService.class), @ServiceProvider(service = AutopsyService.class) }) public class SolrSearchService implements KeywordSearchService, AutopsyService { @@ -196,24 +195,24 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService { /** * Deletes a data source from Solr for a case. - * + * * @param dataSourceId the id of the data source to delete. 
- * - * @throws org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException + * + * @throws + * org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException */ - @NbBundle.Messages({ - "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore=DeleteDataSource did not contain a current Solr core so could not delete the Data Source", - }) @Override public void deleteDataSource(Long dataSourceId) throws KeywordSearchServiceException { + try { - KeywordSearch.getServer().deleteDataSource(dataSourceId); - } catch (NoOpenCoreException | KeywordSearchModuleException | SolrServerException | IOException ex) { - throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, - "SolrSearchService.deleteDataSource.exceptionMessage.noCurrentSolrCore")); + Server ddsServer = KeywordSearch.getServer(); + ddsServer.deleteDataSource(dataSourceId); + } catch (IOException | KeywordSearchModuleException | NoOpenCoreException | SolrServerException ex) { + logger.log(Level.WARNING, NbBundle.getMessage(SolrSearchService.class, "SolrSearchService.DeleteDataSource.msg", dataSourceId), ex); + throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, "SolrSearchService.DeleteDataSource.msg", dataSourceId), ex); } } - + /** * Deletes Solr core for a case. * From c0e3a2d33e9b9ea8b6c3f7ec552421dc4533af1b Mon Sep 17 00:00:00 2001 From: Mark McKinnon Date: Tue, 22 Oct 2019 14:22:17 -0400 Subject: [PATCH 16/16] Update Server.java Removed unused import. 
--- .../src/org/sleuthkit/autopsy/keywordsearch/Server.java | 1 - 1 file changed, 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java index b73d274c84..a4a3b581b3 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Server.java @@ -55,7 +55,6 @@ import org.apache.solr.client.solrj.request.CoreAdminRequest; import org.apache.solr.client.solrj.response.CoreAdminResponse; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.TermsResponse; -import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException;