Merge branch 'develop' of github.com:sleuthkit/autopsy into 7323-dssExcelExport
commit 3572351a18
Changes to Case.java (org.sleuthkit.autopsy.casemodule):

@@ -39,6 +39,7 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
@@ -81,8 +82,14 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent;
import org.sleuthkit.autopsy.casemodule.events.HostsAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.HostsChangedEvent;
import org.sleuthkit.autopsy.casemodule.events.HostsRemovedEvent;
import org.sleuthkit.autopsy.casemodule.events.OsAccountAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.OsAccountChangedEvent;
import org.sleuthkit.autopsy.casemodule.events.PersonsAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.PersonsChangedEvent;
import org.sleuthkit.autopsy.casemodule.events.PersonsRemovedEvent;
import org.sleuthkit.autopsy.casemodule.events.ReportAddedEvent;
import org.sleuthkit.autopsy.casemodule.multiusercases.CaseNodeData.CaseNodeDataException;
import org.sleuthkit.autopsy.casemodule.multiusercases.CoordinationServiceUtils;
@@ -130,11 +137,19 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.HostManager.HostsCreationEvent;
import org.sleuthkit.datamodel.HostManager.HostsUpdateEvent;
import org.sleuthkit.datamodel.HostManager.HostsDeletionEvent;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.OsAccountManager;
import org.sleuthkit.datamodel.OsAccountManager.OsAccountsCreationEvent;
import org.sleuthkit.datamodel.OsAccountManager.OsAccountsUpdateEvent;
import org.sleuthkit.datamodel.Person;
import org.sleuthkit.datamodel.PersonManager.PersonsCreationEvent;
import org.sleuthkit.datamodel.PersonManager.PersonsUpdateEvent;
import org.sleuthkit.datamodel.PersonManager.PersonsDeletionEvent;
import org.sleuthkit.datamodel.Report;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TimelineManager;
@@ -426,8 +441,38 @@ public class Case {
* OSAccount associated with the current case has changed.
* Call getOsAccount to get the changed account;
*/
OS_ACCOUNT_CHANGED;
OS_ACCOUNT_CHANGED,

/**
* Hosts associated with the current case added.
*/
HOSTS_ADDED,

/**
* Hosts associated with the current case has changed.
*/
HOSTS_CHANGED,

/**
* Hosts associated with the current case has been deleted.
*/
HOSTS_DELETED,

/**
* Persons associated with the current case added.
*/
PERSONS_ADDED,

/**
* Persons associated with the current case has changed.
*/
PERSONS_CHANGED,

/**
* Persons associated with the current case has been deleted.
*/
PERSONS_DELETED
;
};

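The new enum values are consumed like the existing Case events: a client registers a java.beans.PropertyChangeListener for the event types it cares about. Below is a minimal sketch of such a subscriber, using the Case.addEventTypeSubscriber/removeEventTypeSubscriber API that appears elsewhere in this commit; the client class itself is illustrative only.

    import java.beans.PropertyChangeListener;
    import java.util.EnumSet;
    import org.sleuthkit.autopsy.casemodule.Case;

    /** Illustrative subscriber that reacts to host/person changes in the current case. */
    public class HostPersonEventClient {

        private final PropertyChangeListener listener = evt -> {
            String eventType = evt.getPropertyName();
            if (Case.Events.HOSTS_ADDED.toString().equals(eventType)
                    || Case.Events.PERSONS_CHANGED.toString().equals(eventType)) {
                // e.g. refresh a view; evt.getNewValue() carries the affected objects
            }
        };

        public void startListening() {
            Case.addEventTypeSubscriber(
                    EnumSet.of(Case.Events.HOSTS_ADDED, Case.Events.PERSONS_CHANGED), listener);
        }

        public void stopListening() {
            Case.removeEventTypeSubscriber(
                    EnumSet.of(Case.Events.HOSTS_ADDED, Case.Events.PERSONS_CHANGED), listener);
        }
    }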
/**
@@ -474,6 +519,78 @@ public class Case {
eventPublisher.publish(new OsAccountChangedEvent(account));
}
}

/**
* Publishes an autopsy event from the sleuthkit HostCreationEvent
* indicating that hosts have been created.
*
* @param event The sleuthkit event for the creation of hosts.
*/
@Subscribe
public void publishHostsAddedEvent(HostsCreationEvent event) {
eventPublisher.publish(new HostsAddedEvent(
event == null ? Collections.emptyList() : event.getHosts()));
}

/**
* Publishes an autopsy event from the sleuthkit HostUpdateEvent
* indicating that hosts have been updated.
*
* @param event The sleuthkit event for the updating of hosts.
*/
@Subscribe
public void publishHostsChangedEvent(HostsUpdateEvent event) {
eventPublisher.publish(new HostsChangedEvent(
event == null ? Collections.emptyList() : event.getHosts()));
}

/**
* Publishes an autopsy event from the sleuthkit HostDeletedEvent
* indicating that hosts have been deleted.
*
* @param event The sleuthkit event for the deleting of hosts.
*/
@Subscribe
public void publishHostsDeletedEvent(HostsDeletionEvent event) {
eventPublisher.publish(new HostsRemovedEvent(
event == null ? Collections.emptyList() : event.getHosts()));
}

/**
* Publishes an autopsy event from the sleuthkit PersonCreationEvent
* indicating that persons have been created.
*
* @param event The sleuthkit event for the creation of persons.
*/
@Subscribe
public void publishPersonsAddedEvent(PersonsCreationEvent event) {
eventPublisher.publish(new PersonsAddedEvent(
event == null ? Collections.emptyList() : event.getPersons()));
}

/**
* Publishes an autopsy event from the sleuthkit PersonUpdateEvent
* indicating that persons have been updated.
*
* @param event The sleuthkit event for the updating of persons.
*/
@Subscribe
public void publishPersonsChangedEvent(PersonsUpdateEvent event) {
eventPublisher.publish(new PersonsChangedEvent(
event == null ? Collections.emptyList() : event.getPersons()));
}

/**
* Publishes an autopsy event from the sleuthkit PersonDeletedEvent
* indicating that persons have been deleted.
*
* @param event The sleuthkit event for the deleting of persons.
*/
@Subscribe
public void publishPersonsDeletedEvent(PersonsDeletionEvent event) {
eventPublisher.publish(new PersonsRemovedEvent(
event == null ? Collections.emptyList() : event.getPersons()));
}
}

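The @Subscribe methods above are event-bus subscribers: the data layer posts its HostsCreationEvent/PersonsUpdateEvent/etc. objects on an event bus, and Case relays them to Autopsy's own event publisher (the eventPublisher.publish calls). The registration of this subscriber object is not shown in this hunk; the snippet below is only a generic Guava EventBus illustration of the relay pattern, with made-up LowLevelEvent and Relay classes.

    import com.google.common.eventbus.EventBus;
    import com.google.common.eventbus.Subscribe;

    public class EventRelayDemo {

        // Hypothetical low-level event, standing in for e.g. HostManager.HostsCreationEvent.
        static class LowLevelEvent {
            final String payload;
            LowLevelEvent(String payload) {
                this.payload = payload;
            }
        }

        // Hypothetical relay, standing in for the @Subscribe methods in Case above.
        static class Relay {
            @Subscribe
            public void onLowLevelEvent(LowLevelEvent event) {
                // Case.java would republish here, e.g. eventPublisher.publish(new HostsAddedEvent(...)).
                System.out.println("republishing: " + event.payload);
            }
        }

        public static void main(String[] args) {
            EventBus bus = new EventBus();
            bus.register(new Relay());                    // the producer side registers the subscriber object
            bus.post(new LowLevelEvent("hosts created")); // posting invokes every matching @Subscribe method
        }
    }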
/**
@@ -1680,7 +1797,55 @@ public class Case {
public void notifyOsAccountChanged(OsAccount account) {
eventPublisher.publish(new OsAccountChangedEvent(account));
}


/**
* Notify via an autopsy event that a host has been added.
* @param host The host that has been added.
*/
public void notifyHostAdded(Host host) {
eventPublisher.publish(new HostsAddedEvent(Collections.singletonList(host)));
}

/**
* Notify via an autopsy event that a host has been changed.
* @param newValue The host that has been updated.
*/
public void notifyHostChanged(Host newValue) {
eventPublisher.publish(new HostsChangedEvent(Collections.singletonList(newValue)));
}

/**
* Notify via an autopsy event that a host has been deleted.
* @param host The host that has been deleted.
*/
public void notifyHostDeleted(Host host) {
eventPublisher.publish(new HostsRemovedEvent(Collections.singletonList(host)));
}

/**
* Notify via an autopsy event that a person has been added.
* @param person The person that has been added.
*/
public void notifyPersonAdded(Person person) {
eventPublisher.publish(new PersonsAddedEvent(Collections.singletonList(person)));
}

/**
* Notify via an autopsy event that a person has been changed.
* @param newValue The person that has been updated.
*/
public void notifyPersonChanged(Person newValue) {
eventPublisher.publish(new PersonsChangedEvent(Collections.singletonList(newValue)));
}

/**
* Notify via an autopsy event that a person has been deleted.
* @param person The person that has been deleted.
*/
public void notifyPersonDeleted(Person person) {
eventPublisher.publish(new PersonsRemovedEvent(Collections.singletonList(person)));
}

/**
* Adds a report to the case.
*

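A hedged usage sketch for the new notify methods: code that adds a host through the case database would publish the corresponding autopsy event so tree nodes and other listeners refresh. Case.getCurrentCaseThrows(), getSleuthkitCase() and the exact exception thrown are assumptions about the surrounding API rather than part of this hunk; createHost(String) itself appears in the PortableCaseReportModule change further down.

    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
    import org.sleuthkit.datamodel.Host;
    import org.sleuthkit.datamodel.TskCoreException;

    public class HostCreationExample {

        /** Creates a host in the current case database and notifies listeners. */
        static Host addHost(String hostName) throws NoCurrentCaseException, TskCoreException {
            Case currentCase = Case.getCurrentCaseThrows();
            Host newHost = currentCase.getSleuthkitCase().getHostManager().createHost(hostName);
            // Fire the new autopsy-level event added in this commit.
            currentCase.notifyHostAdded(newHost);
            return newHost;
        }
    }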
New file: org/sleuthkit/autopsy/casemodule/events/HostsAddedEvent.java

@@ -0,0 +1,39 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Host;

/**
* Event fired when new hosts are added.
*/
public class HostsAddedEvent extends HostsEvent {

private static final long serialVersionUID = 1L;

/**
* Main constructor.
* @param dataModelObjects The hosts that have been added.
*/
public HostsAddedEvent(List<Host> dataModelObjects) {
super(Case.Events.HOSTS_ADDED.name(), dataModelObjects);
}
}

New file: org/sleuthkit/autopsy/casemodule/events/HostsChangedEvent.java

@@ -0,0 +1,41 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Host;

/**
* Event fired when hosts are changed.
*/
public class HostsChangedEvent extends HostsEvent {

private static final long serialVersionUID = 1L;

/**
* Main constructor.
*
* @param dataModelObjects The new values for the hosts that have been
* changed.
*/
public HostsChangedEvent(List<Host> dataModelObjects) {
super(Case.Events.HOSTS_CHANGED.name(), dataModelObjects);
}
}

New file: org/sleuthkit/autopsy/casemodule/events/HostsEvent.java

@@ -0,0 +1,87 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.HostManager;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

/**
* Base event class for when something pertaining to hosts changes.
*/
public class HostsEvent extends TskDataModelChangeEvent<Host> {

/**
* Retrieves a list of ids from a list of hosts.
*
* @param hosts The hosts.
* @return The list of ids.
*/
private static List<Long> getIds(List<Host> hosts) {
return getSafeList(hosts).stream()
.filter(h -> h != null)
.map(h -> h.getId()).collect(Collectors.toList());
}

/**
* Returns the hosts or an empty list.
*
* @param hosts The host list.
* @return The host list or an empty list if the parameter is null.
*/
private static List<Host> getSafeList(List<Host> hosts) {
return hosts == null ? Collections.emptyList() : hosts;
}

/**
* Main constructor.
*
* @param eventName The name of the Case.Events enum value for the event
* type.
* @param dataModelObjects The list of hosts for the event.
*/
protected HostsEvent(String eventName, List<Host> dataModelObjects) {
super(eventName, getIds(dataModelObjects), new ArrayList<>(getSafeList(dataModelObjects)));
}

@Override
protected List<Host> getDataModelObjects(SleuthkitCase caseDb, List<Long> ids) throws TskCoreException {
HostManager hostManager = caseDb.getHostManager();
List<Host> toRet = new ArrayList<>();
if (ids != null) {
for (Long id : ids) {
if (id == null) {
continue;
}

Optional<Host> thisHostOpt = hostManager.getHost(id);
thisHostOpt.ifPresent((h) -> toRet.add(h));
}
}

return toRet;
}

}

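On the receiving side these events arrive as PropertyChangeEvents whose property name is the Case.Events value and whose concrete type carries the data-model objects; getNewValue() on the typed event yields the list of hosts, which is how the HostNode and PersonGroupingNode listeners later in this commit use it. A minimal consumer sketch (the listener class is illustrative only):

    import java.beans.PropertyChangeEvent;
    import java.beans.PropertyChangeListener;
    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.casemodule.events.HostsAddedEvent;
    import org.sleuthkit.datamodel.Host;

    public class HostsAddedListener implements PropertyChangeListener {

        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            if (Case.Events.HOSTS_ADDED.toString().equals(evt.getPropertyName())
                    && evt instanceof HostsAddedEvent) {
                // getNewValue() on the typed event yields the affected hosts.
                for (Host host : ((HostsAddedEvent) evt).getNewValue()) {
                    System.out.println("host added: " + host.getName());
                }
            }
        }
    }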
New file: org/sleuthkit/autopsy/casemodule/events/HostsRemovedEvent.java

@@ -0,0 +1,39 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Host;

/**
* Event fired when hosts are removed.
*/
public class HostsRemovedEvent extends HostsEvent {

private static final long serialVersionUID = 1L;

/**
* Main constructor.
* @param dataModelObjects The list of hosts that have been deleted.
*/
public HostsRemovedEvent(List<Host> dataModelObjects) {
super(Case.Events.HOSTS_DELETED.name(), dataModelObjects);
}
}

New file: org/sleuthkit/autopsy/casemodule/events/PersonsAddedEvent.java

@@ -0,0 +1,39 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Person;

/**
* Event fired when new persons are added.
*/
public class PersonsAddedEvent extends PersonsEvent {

private static final long serialVersionUID = 1L;

/**
* Main constructor.
* @param dataModelObjects The persons that have been added.
*/
public PersonsAddedEvent(List<Person> dataModelObjects) {
super(Case.Events.PERSONS_ADDED.name(), dataModelObjects);
}
}

New file: org/sleuthkit/autopsy/casemodule/events/PersonsChangedEvent.java

@@ -0,0 +1,41 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Person;

/**
* Event fired when persons are changed.
*/
public class PersonsChangedEvent extends PersonsEvent {

private static final long serialVersionUID = 1L;

/**
* Main constructor.
*
* @param dataModelObjects The new values for the persons that have been
* changed.
*/
public PersonsChangedEvent(List<Person> dataModelObjects) {
super(Case.Events.PERSONS_CHANGED.name(), dataModelObjects);
}
}

New file: org/sleuthkit/autopsy/casemodule/events/PersonsEvent.java

@@ -0,0 +1,87 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.sleuthkit.datamodel.Person;
import org.sleuthkit.datamodel.PersonManager;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

/**
* Base event class for when something pertaining to persons changes.
*/
public class PersonsEvent extends TskDataModelChangeEvent<Person> {

/**
* Retrieves a list of ids from a list of persons.
*
* @param persons The persons.
* @return The list of ids.
*/
private static List<Long> getIds(List<Person> persons) {
return getSafeList(persons).stream()
.filter(h -> h != null)
.map(h -> h.getId()).collect(Collectors.toList());
}

/**
* Returns the persons or an empty list.
*
* @param persons The person list.
* @return The person list or an empty list if the parameter is null.
*/
private static List<Person> getSafeList(List<Person> persons) {
return persons == null ? Collections.emptyList() : persons;
}

/**
* Main constructor.
*
* @param eventName The name of the Case.Events enum value for the event
* type.
* @param dataModelObjects The list of persons for the event.
*/
protected PersonsEvent(String eventName, List<Person> dataModelObjects) {
super(eventName, getIds(dataModelObjects), new ArrayList<>(getSafeList(dataModelObjects)));
}

@Override
protected List<Person> getDataModelObjects(SleuthkitCase caseDb, List<Long> ids) throws TskCoreException {
PersonManager personManager = caseDb.getPersonManager();
List<Person> toRet = new ArrayList<>();
if (ids != null) {
for (Long id : ids) {
if (id == null) {
continue;
}

Optional<Person> thisPersonOpt = personManager.getPerson(id);
thisPersonOpt.ifPresent((h) -> toRet.add(h));
}
}

return toRet;
}

}

New file: org/sleuthkit/autopsy/casemodule/events/PersonsRemovedEvent.java

@@ -0,0 +1,39 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule.events;

import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Person;

/**
* Event fired when persons are removed.
*/
public class PersonsRemovedEvent extends PersonsEvent {

private static final long serialVersionUID = 1L;

/**
* Main constructor.
* @param dataModelObjects The list of persons that have been deleted.
*/
public PersonsRemovedEvent(List<Person> dataModelObjects) {
super(Case.Events.PERSONS_DELETED.name(), dataModelObjects);
}
}

Changes to AutopsyTreeChildFactory.java:

@@ -23,11 +23,11 @@ import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Node;
@@ -47,6 +47,19 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
public final class AutopsyTreeChildFactory extends ChildFactory.Detachable<Object> {

private static final Set<Case.Events> LISTENING_EVENTS = EnumSet.of(
Case.Events.DATA_SOURCE_ADDED,
Case.Events.HOSTS_ADDED,
Case.Events.HOSTS_DELETED,
Case.Events.PERSONS_ADDED,
Case.Events.PERSONS_DELETED,
Case.Events.PERSONS_CHANGED
);

private static final Set<String> LISTENING_EVENT_NAMES = LISTENING_EVENTS.stream()
.map(evt -> evt.name())
.collect(Collectors.toSet());

private static final Logger logger = Logger.getLogger(AutopsyTreeChildFactory.class.getName());

/**
@@ -56,7 +69,7 @@ public final class AutopsyTreeChildFactory extends ChildFactory.Detachable<Objec
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())
if (LISTENING_EVENT_NAMES.contains(eventType)
&& Objects.equals(CasePreferences.getGroupItemsInTreeByDataSource(), true)) {
refreshChildren();
}
@@ -66,13 +79,13 @@ public final class AutopsyTreeChildFactory extends ChildFactory.Detachable<Objec
@Override
protected void addNotify() {
super.addNotify();
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
Case.addEventTypeSubscriber(LISTENING_EVENTS, pcl);
}

@Override
protected void removeNotify() {
super.removeNotify();
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
Case.removeEventTypeSubscriber(LISTENING_EVENTS, pcl);
}

/**
@@ -94,11 +107,11 @@ public final class AutopsyTreeChildFactory extends ChildFactory.Detachable<Objec
.map(PersonGrouping::new)
.sorted()
.forEach(list::add);

if (CollectionUtils.isNotEmpty(personManager.getHostsForPerson(null))) {
list.add(new PersonGrouping(null));
}

return true;
} else {
// otherwise, just show host level

Changes to ContentNodeSelectionInfo.java:

@@ -50,7 +50,7 @@ public class ContentNodeSelectionInfo implements NodeSelectionInfo {
@Override
public boolean matches(Node candidateNode) {
Content content = candidateNode.getLookup().lookup(Content.class);
return content.getId() == contentId;
return (content != null && content.getId() == contentId);
}

}

Changes to DataSourcesByTypeNode.java:

@@ -33,7 +33,6 @@ import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.Host.HostStatus;
import org.sleuthkit.datamodel.TskCoreException;

/**
@@ -95,6 +94,13 @@ public class DataSourcesByTypeNode extends DisplayableItemNode {

private static final String NAME = Bundle.DataSourcesHostsNode_name();

/**
* @return The name used to identify the node of this type with a lookup.
*/
public static String getNameIdentifier() {
return NAME;
}

/**
* Main constructor.
*/

Changes to DataSourcesNode.java:

@@ -43,7 +43,15 @@ import org.sleuthkit.datamodel.TskDataException;
*/
public class DataSourcesNode extends DisplayableItemNode {

public static final String NAME = NbBundle.getMessage(DataSourcesNode.class, "DataSourcesNode.name");
private static final String NAME = NbBundle.getMessage(DataSourcesNode.class, "DataSourcesNode.name");

/**
* @return The name used to identify the node of this type with a lookup.
*/
public static String getNameIdentifier() {
return NAME;
}

private final String displayName;

// NOTE: The images passed in via argument will be ignored.

Changes to HostNode.java:

@@ -33,9 +33,11 @@ import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.util.WeakListeners;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.events.HostsChangedEvent;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.hosts.AssociatePersonsMenuAction;
import org.sleuthkit.autopsy.datamodel.hosts.RemoveParentPersonAction;
@@ -76,7 +78,7 @@ public class HostNode extends DisplayableItemNode {
/**
* Listener for handling DATA_SOURCE_ADDED events.
*/
private final PropertyChangeListener pcl = new PropertyChangeListener() {
private final PropertyChangeListener dataSourceAddedPcl = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
@@ -88,12 +90,12 @@ public class HostNode extends DisplayableItemNode {

@Override
protected void addNotify() {
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), dataSourceAddedPcl);
}

@Override
protected void removeNotify() {
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), dataSourceAddedPcl);
}

@Override
@@ -163,7 +165,39 @@ public class HostNode extends DisplayableItemNode {
return new DataSourceGroupingNode(key.getDataSource());
};

/**
* Listener for handling host change events.
*/
private final PropertyChangeListener hostChangePcl = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (hostId != null && eventType.equals(Case.Events.HOSTS_CHANGED.toString()) && evt instanceof HostsChangedEvent) {
((HostsChangedEvent) evt).getNewValue().stream()
.filter(h -> h != null && h.getId() == hostId)
.findFirst()
.ifPresent((newHost) -> {
setName(newHost.getName());
setDisplayName(newHost.getName());
});
}
}
};

/*
* Get the host name or 'unknown host' if null.
*
* @param host The host.
* @return The display name.
*/
private static String getHostName(Host host) {
return (host == null || host.getName() == null)
? Bundle.HostGroupingNode_unknownHostNode_title()
: host.getName();
}

private final Host host;
private final Long hostId;

/**
* Main constructor for HostDataSources key where data source children
@@ -192,14 +226,25 @@ public class HostNode extends DisplayableItemNode {
* @param host The host.
*/
private HostNode(Children children, Host host) {
super(children, host == null ? null : Lookups.singleton(host));
this(children, host, getHostName(host));
}

String safeName = (host == null || host.getName() == null)
? Bundle.HostGroupingNode_unknownHostNode_title()
: host.getName();

super.setName(safeName);
super.setDisplayName(safeName);
/**
* Constructor.
*
* @param children The children for this host node.
* @param host The host.
* @param displayName The displayName.
*/
private HostNode(Children children, Host host, String displayName) {
super(children,
host == null ? Lookups.fixed(displayName) : Lookups.fixed(host, displayName));

hostId = host == null ? null : host.getId();
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.HOSTS_CHANGED),
WeakListeners.propertyChange(hostChangePcl, this));
super.setName(displayName);
super.setDisplayName(displayName);
this.setIconBaseWithExtension(ICON_PATH);
this.host = host;
}

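The rename subscription in HostNode is registered through WeakListeners so that a node which the tree has discarded does not stay strongly reachable from the static Case event publisher, while the node keeps the real listener in a field (hostChangePcl) so the delegate is not garbage collected prematurely. Below is a condensed sketch of that pattern with an illustrative node class; the event choice and display logic are placeholders, not part of the commit:

    import java.beans.PropertyChangeListener;
    import java.util.EnumSet;
    import org.openide.nodes.AbstractNode;
    import org.openide.nodes.Children;
    import org.openide.util.WeakListeners;
    import org.sleuthkit.autopsy.casemodule.Case;

    public class WeaklySubscribedNode extends AbstractNode {

        // Strong reference: keeps the delegate alive exactly as long as this node is alive.
        private final PropertyChangeListener changePcl =
                evt -> setDisplayName(String.valueOf(evt.getNewValue()));

        public WeaklySubscribedNode() {
            super(Children.LEAF);
            // The publisher only ever holds a weak proxy; once this node is garbage
            // collected, the subscription effectively disappears with it.
            Case.addEventTypeSubscriber(EnumSet.of(Case.Events.HOSTS_CHANGED),
                    WeakListeners.propertyChange(changePcl, this));
        }
    }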
Changes to PersonGroupingNode.java:

@@ -23,16 +23,20 @@ import java.beans.PropertyChangeListener;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javax.swing.Action;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.util.WeakListeners;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.events.PersonsChangedEvent;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.persons.DeletePersonAction;
import org.sleuthkit.autopsy.datamodel.persons.EditPersonAction;
@@ -48,6 +52,14 @@ import org.sleuthkit.datamodel.TskCoreException;
public class PersonGroupingNode extends DisplayableItemNode {

private static final String ICON_PATH = "org/sleuthkit/autopsy/images/person.png";

/**
* Returns the id of an unknown persons node. This can be used with a node lookup.
* @return The id of an unknown persons node.
*/
public static String getUnknownPersonId() {
return Bundle.PersonNode_unknownPersonNode_title();
}

/**
* Responsible for creating the host children of this person.
@@ -56,6 +68,15 @@ public class PersonGroupingNode extends DisplayableItemNode {

private static final Logger logger = Logger.getLogger(PersonChildren.class.getName());

private static final Set<Case.Events> CHILD_EVENTS = EnumSet.of(
Case.Events.HOSTS_ADDED,
Case.Events.HOSTS_DELETED,
Case.Events.PERSONS_CHANGED);

private static final Set<String> CHILD_EVENTS_STR = CHILD_EVENTS.stream()
.map(ev -> ev.name())
.collect(Collectors.toSet());

private final Person person;

/**
@@ -68,15 +89,13 @@ public class PersonGroupingNode extends DisplayableItemNode {
}

/**
* Listener for handling DATA_SOURCE_ADDED and DATA_SOURCE_DELETED
* events.
* Listener for handling adding and removing host events.
*/
private final PropertyChangeListener pcl = new PropertyChangeListener() {
private final PropertyChangeListener hostAddedDeletedPcl = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())
|| eventType.equals(Case.Events.DATA_SOURCE_DELETED.toString())) {
if (eventType != null && CHILD_EVENTS_STR.contains(eventType)) {
refresh(true);
}
}
@@ -84,12 +103,12 @@ public class PersonGroupingNode extends DisplayableItemNode {

@Override
protected void addNotify() {
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
Case.addEventTypeSubscriber(CHILD_EVENTS, hostAddedDeletedPcl);
}

@Override
protected void removeNotify() {
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
Case.removeEventTypeSubscriber(CHILD_EVENTS, hostAddedDeletedPcl);
}

@Override
@@ -117,6 +136,38 @@ public class PersonGroupingNode extends DisplayableItemNode {
}

private final Person person;
private final Long personId;

/**
* Listener for handling person change events.
*/
private final PropertyChangeListener personChangePcl = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (personId != null && eventType.equals(Case.Events.PERSONS_CHANGED.toString()) && evt instanceof PersonsChangedEvent) {
((PersonsChangedEvent) evt).getNewValue().stream()
.filter(p -> p != null && p.getId() == personId)
.findFirst()
.ifPresent((newPerson) -> {
setName(newPerson.getName());
setDisplayName(newPerson.getName());
});
}
}
};

/**
* Gets the display name for this person or "Unknown Persons".
*
* @param person The person.
* @return The non-empty string for the display name.
*/
private static String getDisplayName(Person person) {
return (person == null || person.getName() == null)
? getUnknownPersonId()
: person.getName();
}

/**
* Main constructor.
@@ -124,16 +175,25 @@ public class PersonGroupingNode extends DisplayableItemNode {
* @param person The person record to be represented.
*/
PersonGroupingNode(Person person) {
super(Children.create(new PersonChildren(person), false), person == null ? null : Lookups.singleton(person));
this(person, getDisplayName(person));
}

String safeName = (person == null || person.getName() == null)
? Bundle.PersonNode_unknownPersonNode_title()
: person.getName();

super.setName(safeName);
super.setDisplayName(safeName);
/**
* Constructor.
*
* @param person The person.
* @param displayName The display name for the person.
*/
private PersonGroupingNode(Person person, String displayName) {
super(Children.create(new PersonChildren(person), false),
person == null ? Lookups.fixed(displayName) : Lookups.fixed(person, displayName));
super.setName(displayName);
super.setDisplayName(displayName);
this.setIconBaseWithExtension(ICON_PATH);
this.person = person;
this.personId = person == null ? null : person.getId();
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.PERSONS_CHANGED),
WeakListeners.propertyChange(personChangePcl, this));
}

@Override

Changes to ResultsNode.java:

@@ -31,8 +31,12 @@ import org.sleuthkit.datamodel.SleuthkitCase;
public class ResultsNode extends DisplayableItemNode {

@NbBundle.Messages("ResultsNode.name.text=Results")
public static final String NAME = Bundle.ResultsNode_name_text();
private static final String NAME = Bundle.ResultsNode_name_text();

public static String getNameIdentifier() {
return NAME;
}

public ResultsNode(SleuthkitCase sleuthkitCase) {
this(sleuthkitCase, 0);
}

Changes to DirectoryTreeTopComponent.java:

@@ -81,6 +81,7 @@ import org.sleuthkit.autopsy.datamodel.InterestingHits;
import org.sleuthkit.autopsy.datamodel.KeywordHits;
import org.sleuthkit.autopsy.datamodel.ResultsNode;
import org.sleuthkit.autopsy.datamodel.AutopsyTreeChildFactory;
import org.sleuthkit.autopsy.datamodel.PersonGroupingNode;
import org.sleuthkit.autopsy.datamodel.Tags;
import org.sleuthkit.autopsy.datamodel.ViewsNode;
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
@@ -89,6 +90,9 @@ import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Person;
import org.sleuthkit.datamodel.TskCoreException;

/**
@@ -193,7 +197,7 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
private void preExpandNodes(Children rootChildren) {
BeanTreeView tree = getTree();

Node results = rootChildren.findChild(ResultsNode.NAME);
Node results = rootChildren.findChild(ResultsNode.getNameIdentifier());
if (!Objects.isNull(results)) {
tree.expandNode(results);
Children resultsChildren = results.getChildren();
@@ -265,7 +269,7 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
* Setter to determine if rejected results should be shown or not.
*
* @param showRejectedResults True if showing rejected results; otherwise
* false.
* false.
*/
public void setShowRejectedResults(boolean showRejectedResults) {
this.showRejectedResults = showRejectedResults;
@@ -797,7 +801,7 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
} // change in node selection
else if (changed.equals(ExplorerManager.PROP_SELECTED_NODES)) {
respondSelection((Node[]) event.getOldValue(), (Node[]) event.getNewValue());
}
}
}
}

@@ -1012,8 +1016,7 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
* Set the selected node using a path to a previously selected node.
*
* @param previouslySelectedNodePath Path to a previously selected node.
* @param rootNodeName Name of the root node to match, may be
* null.
* @param rootNodeName Name of the root node to match, may be null.
*/
private void setSelectedNode(final String[] previouslySelectedNodePath, final String rootNodeName) {
if (previouslySelectedNodePath == null) {
@@ -1070,12 +1073,97 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
return false;
}

/**
* Does dfs search of node while nodes are Host, Person, or
* DataSourcesByType looking for the Results Node.
*
* @param node The node.
* @return The child nodes that are at the data source level.
*/
private Node getResultsNodeSearch(Node node, long dataSourceId) {
if (node == null) {
return null;
} else if (node.getLookup().lookup(Host.class) != null
|| node.getLookup().lookup(Person.class) != null
|| PersonGroupingNode.getUnknownPersonId().equals(node.getLookup().lookup(String.class))) {
Children children = node.getChildren();
Node[] childNodes = children == null ? null : children.getNodes();
if (childNodes != null) {
for (Node child : childNodes) {
Node foundExtracted = getResultsNodeSearch(child, dataSourceId);
if (foundExtracted != null) {
return foundExtracted;
}
}
}
} else {
DataSource dataSource = node.getLookup().lookup(DataSource.class);
if (dataSource != null && dataSource.getId() == dataSourceId) {
Children dsChildren = node.getChildren();
if (dsChildren != null) {
return dsChildren.findChild(ResultsNode.getNameIdentifier());
}
}
}
return null;
}

/**
* Finds the results node for the specific artifact.
*
* @param art The artifact to find the relevant Results Node.
* @return THe Results Node or null.
*/
private Node getResultsNode(final BlackboardArtifact art) {
Children rootChilds = em.getRootContext().getChildren();

Node resultsNode = rootChilds.findChild(ResultsNode.getNameIdentifier());
if (resultsNode != null) {
return resultsNode;
}

long dataSourceId;
try {
dataSourceId = art.getDataSource().getId();
} catch (TskCoreException ex) {
LOGGER.log(Level.WARNING, "There was an error fetching the data source id for artifact.", ex);
return null;
}

Node[] rootNodes = rootChilds.getNodes();
if (rootNodes != null) {
for (Node rootNode : rootNodes) {
resultsNode = getResultsNodeSearch(rootNode, dataSourceId);
if (resultsNode != null) {
return resultsNode;
}
}
}

return null;
}

/**
* Navigates to artifact and shows in view.
*
* NOTE: This code will likely need updating in the event that the structure
* of the nodes is changed (i.e. adding parent levels). Places to look when
* changing node structure include:
*
* DirectoryTreeTopComponent.viewArtifact, ViewContextAction
*
* @param art The artifact.
*/
public void viewArtifact(final BlackboardArtifact art) {
int typeID = art.getArtifactTypeID();
String typeName = art.getArtifactTypeName();
Children rootChilds = em.getRootContext().getChildren();
Node treeNode = null;
Node resultsNode = rootChilds.findChild(ResultsNode.NAME);

Node resultsNode = getResultsNode(art);
if (resultsNode == null) {
return;
}

Children resultsChilds = resultsNode.getChildren();
if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {
Node hashsetRootNode = resultsChilds.findChild(typeName);
@@ -1160,11 +1248,16 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
if (setNode == null) {
return;
}
Children interestingChildren = setNode.getChildren();
if (interestingChildren == null) {

Children fileArtifactChildren = setNode.getChildren();
Node[] fileArtifactNodes = fileArtifactChildren == null ? null : fileArtifactChildren.getNodes();
if (fileArtifactNodes == null || fileArtifactNodes.length != 2) {
return;
}
treeNode = interestingChildren.findChild(art.getDisplayName());

treeNode = (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID())
? fileArtifactNodes[0]
: fileArtifactNodes[1];
} catch (TskCoreException ex) {
LOGGER.log(Level.WARNING, "Error retrieving attributes", ex); //NON-NLS
}

Changes to ViewContextAction.java:

@@ -22,10 +22,13 @@ import java.awt.EventQueue;
import java.awt.event.ActionEvent;
import java.beans.PropertyVetoException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.sleuthkit.autopsy.coreutils.Logger;
import javax.swing.AbstractAction;
import org.openide.nodes.AbstractNode;
@@ -43,8 +46,10 @@ import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode;
import org.sleuthkit.autopsy.datamodel.AbstractFsContentNode;
import org.sleuthkit.autopsy.datamodel.BlackboardArtifactNode;
import org.sleuthkit.autopsy.datamodel.ContentNodeSelectionInfo;
import org.sleuthkit.autopsy.datamodel.DataSourcesByTypeNode;
import org.sleuthkit.autopsy.datamodel.DataSourcesNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.PersonGroupingNode;
import org.sleuthkit.autopsy.datamodel.RootContentChildren;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -52,6 +57,8 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentVisitor;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Person;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -141,6 +148,12 @@ public class ViewContextAction extends AbstractAction {
* branch of the tree view to the level of the parent of the content,
* selecting the parent in the tree view, then selecting the content in the
* results view.
*
* NOTE: This code will likely need updating in the event that the structure
* of the nodes is changed (i.e. adding parent levels). Places to look when
* changing node structure include:
*
* DirectoryTreeTopComponent.viewArtifact, ViewContextAction
*
* @param event The action event.
*/
@@ -186,23 +199,26 @@ public class ViewContextAction extends AbstractAction {
DataSource datasource = skCase.getDataSource(contentDSObjid);
dsname = datasource.getName();
Children rootChildren = treeViewExplorerMgr.getRootContext().getChildren();

if (null != parentContent) {
// the tree view needs to be searched to find the parent treeview node.
/* NOTE: we can't do a lookup by data source name here, becase if there
are multiple data sources with the same name, then "getChildren().findChild(dsname)"
simply returns the first one that it finds. Instead we have to loop over all
data sources with that name, and make sure we find the correct one.
*/
for (int i = 0; i < rootChildren.getNodesCount(); i++) {
*/
List<Node> dataSourceLevelNodes = Stream.of(rootChildren.getNodes())
.flatMap(rootNode -> getDataSourceLevelNodes(rootNode).stream())
.collect(Collectors.toList());

for (Node treeNode : dataSourceLevelNodes) {
// in the root, look for a data source node with the name of interest
Node treeNode = rootChildren.getNodeAt(i);
if (!(treeNode.getName().equals(dsname))) {
continue;
}

// for this data source, get the "Data Sources" child node
Node datasourceGroupingNode = treeNode.getChildren().findChild(DataSourcesNode.NAME);
Node datasourceGroupingNode = treeNode.getChildren().findChild(DataSourcesNode.getNameIdentifier());

// check whether this is the data source we are looking for
parentTreeViewNode = findParentNodeInTree(parentContent, datasourceGroupingNode);
@@ -218,7 +234,7 @@ public class ViewContextAction extends AbstractAction {
Node datasourceGroupingNode = rootChildren.findChild(dsname);
if (!Objects.isNull(datasourceGroupingNode)) {
Children dsChildren = datasourceGroupingNode.getChildren();
parentTreeViewNode = dsChildren.findChild(DataSourcesNode.NAME);
parentTreeViewNode = dsChildren.findChild(DataSourcesNode.getNameIdentifier());
}
}

@@ -234,13 +250,19 @@ public class ViewContextAction extends AbstractAction {
}
} else { // Classic view
// Start the search at the DataSourcesNode
parentTreeViewNode = treeViewExplorerMgr.getRootContext().getChildren().findChild(DataSourcesNode.NAME);

if (null != parentContent) {
// the tree view needs to be searched to find the parent treeview node.
Node potentialParentTreeViewNode = findParentNodeInTree(parentContent, parentTreeViewNode);
if (potentialParentTreeViewNode != null) {
parentTreeViewNode = potentialParentTreeViewNode;
Children rootChildren = treeViewExplorerMgr.getRootContext().getChildren();
Node rootDsNode = rootChildren == null ? null : rootChildren.findChild(DataSourcesByTypeNode.getNameIdentifier());
if (rootDsNode != null) {
for (Node dataSourceLevelNode : getDataSourceLevelNodes(rootDsNode)) {
DataSource dataSource = dataSourceLevelNode.getLookup().lookup(DataSource.class);
if (dataSource != null) {
// the tree view needs to be searched to find the parent treeview node.
Node potentialParentTreeViewNode = findParentNodeInTree(parentContent, dataSourceLevelNode);
if (potentialParentTreeViewNode != null) {
parentTreeViewNode = potentialParentTreeViewNode;
break;
}
}
}
}
}
@@ -286,6 +308,34 @@ public class ViewContextAction extends AbstractAction {
});
}

/**
* If the node has lookup of host or person, returns children. If not, just
* returns itself.
*
* @param node The node.
* @return The child nodes that are at the data source level.
*/
private List<Node> getDataSourceLevelNodes(Node node) {
if (node == null) {
return Collections.emptyList();
} else if (node.getLookup().lookup(Host.class) != null ||
node.getLookup().lookup(Person.class) != null ||
DataSourcesByTypeNode.getNameIdentifier().equals(node.getLookup().lookup(String.class)) ||
PersonGroupingNode.getUnknownPersonId().equals(node.getLookup().lookup(String.class))) {
Children children = node.getChildren();
Node[] childNodes = children == null ? null : children.getNodes();
if (childNodes == null) {
return Collections.emptyList();
}

return Stream.of(node.getChildren().getNodes())
.flatMap(parent -> getDataSourceLevelNodes(parent).stream())
.collect(Collectors.toList());
} else {
return Arrays.asList(node);
}
}

/**
* Searches tree for parent node by getting an ordered list of the ancestors
* of the specified content.

Changes to PortableCaseReportModule.java:

@@ -1085,7 +1085,7 @@ public class PortableCaseReportModule implements ReportModule {
Host newHost = null;
if (content instanceof DataSource) {
Host oldHost = ((DataSource)content).getHost();
newHost = portableSkCase.getHostManager().getOrCreateHost(oldHost.getName());
newHost = portableSkCase.getHostManager().createHost(oldHost.getName());
}

CaseDbTransaction trans = portableSkCase.beginTransaction();

Changes to ImageGalleryController.java:

@@ -659,7 +659,7 @@ public final class ImageGalleryController {

private static ListeningExecutorService getNewDBExecutor() {
return MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor(
new ThreadFactoryBuilder().setNameFormat("DB-Worker-Thread-%d").build()));
new ThreadFactoryBuilder().setNameFormat("ImageGallery-DB-Worker-Thread-%d").build()));
}

/**

Changes to AdHocSearchPanel.java:

@@ -148,7 +148,7 @@ abstract class AdHocSearchPanel extends javax.swing.JPanel {
}

/**
* Get a list of data source display name.
* Get a list of data source display names.
*
* @return The list of data source name
*/

@ -9,8 +9,20 @@ DropdownListSearchPanel.selected=Ad Hoc Search data source filter is selected
DropdownSingleTermSearchPanel.selected=Ad Hoc Search data source filter is selected
DropdownSingleTermSearchPanel.warning.text=Boundary characters ^ and $ do not match word boundaries. Consider\nreplacing with an explicit list of boundary characters, such as [ \\.,]
DropdownSingleTermSearchPanel.warning.title=Warning
ExtractAllTermsReport.description.text=Extracts all unique words out of the current case. NOTE: The extracted words are lower-cased.
ExtractAllTermsReport.error.noOpenCase=No currently open case.
ExtractAllTermsReport.export.error=Error During Unique Word Extraction
ExtractAllTermsReport.exportComplete=Unique Word Extraction Complete
ExtractAllTermsReport.getName.text=Extract Unique Words
# {0} - Number of extracted terms
ExtractAllTermsReport.numberExtractedTerms=Extracted {0} terms...
ExtractAllTermsReport.search.ingestInProgressBody=<html>Keyword Search Ingest is currently running.<br />Not all files have been indexed and unique word extraction might yield incomplete results.<br />Do you want to proceed with unique word extraction anyway?</html>
# {0} - Keyword search commit frequency
ExtractAllTermsReport.search.noFilesInIdxMsg=No files are in index yet. Try again later. Index is updated every {0} minutes.
ExtractAllTermsReport.search.noFilesInIdxMsg2=No files are in index yet. Try again later
ExtractAllTermsReport.search.searchIngestInProgressTitle=Keyword Search Ingest in Progress
ExtractAllTermsReport.startExport=Starting Unique Word Extraction
ExtractedContentPanel.setMarkup.panelTxt=<span style='font-style:italic'>Loading text... Please wait</span>
# {0} - Content name
ExtractedContentPanel.SetMarkup.progress.loading=Loading text for {0}
GlobalEditListPanel.editKeyword.title=Edit Keyword
GlobalEditListPanel.warning.text=Boundary characters ^ and $ do not match word boundaries. Consider\nreplacing with an explicit list of boundary characters, such as [ \\.,]
@ -218,6 +230,7 @@ Server.deleteCore.exception.msg=Failed to delete Solr colelction {0}
Server.exceptionMessage.unableToBackupCollection=Unable to backup Solr collection
Server.exceptionMessage.unableToCreateCollection=Unable to create Solr collection
Server.exceptionMessage.unableToRestoreCollection=Unable to restore Solr collection
Server.getAllTerms.error=Extraction of all unique Solr terms failed:
Server.start.exception.cantStartSolr.msg=Could not start Solr server process
Server.start.exception.cantStartSolr.msg2=Could not start Solr server process
Server.isRunning.exception.errCheckSolrRunning.msg=Error checking if Solr server is running
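The `# {0} - ...` comment lines above document the positional parameters of the keys that follow them; the NetBeans annotation processor turns each @NbBundle.Messages key into a generated Bundle accessor (dots become underscores). A minimal sketch of that round trip, using a hypothetical key name rather than one from this commit:

import org.openide.util.NbBundle;

class BundleSketch {

    // Hypothetical key for illustration; at compile time the annotation
    // processor adds it to Bundle.properties and generates
    // Bundle.BundleSketch_termsExtracted(long).
    @NbBundle.Messages({
        "# {0} - Number of extracted terms",
        "BundleSketch.termsExtracted=Extracted {0} terms..."
    })
    static String progressText(long count) {
        return Bundle.BundleSketch_termsExtracted(count);
    }
}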
KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/ExtractAllTermsReport.java (new executable file, 134 lines)
@ -0,0 +1,134 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.keywordsearch;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.report.GeneralReportModule;
import org.sleuthkit.autopsy.report.GeneralReportSettings;
import org.sleuthkit.autopsy.report.ReportProgressPanel;

/**
 * Instances of this class plug in to the reporting infrastructure to provide a
 * convenient way to extract all unique terms from Solr index.
 */
@ServiceProvider(service = GeneralReportModule.class)
public class ExtractAllTermsReport implements GeneralReportModule {

    private static final Logger logger = Logger.getLogger(ExtractAllTermsReport.class.getName());
    private static final String OUTPUT_FILE_NAME = "Unique Words.txt";

    @NbBundle.Messages({
        "ExtractAllTermsReport.getName.text=Extract Unique Words"})
    @Override
    public String getName() {
        return Bundle.ExtractAllTermsReport_getName_text();
    }

    @NbBundle.Messages({
        "ExtractAllTermsReport.error.noOpenCase=No currently open case.",
        "# {0} - Keyword search commit frequency",
        "ExtractAllTermsReport.search.noFilesInIdxMsg=No files are in index yet. Try again later. Index is updated every {0} minutes.",
        "ExtractAllTermsReport.search.noFilesInIdxMsg2=No files are in index yet. Try again later",
        "ExtractAllTermsReport.search.searchIngestInProgressTitle=Keyword Search Ingest in Progress",
        "ExtractAllTermsReport.search.ingestInProgressBody=<html>Keyword Search Ingest is currently running.<br />Not all files have been indexed and unique word extraction might yield incomplete results.<br />Do you want to proceed with unique word extraction anyway?</html>",
        "ExtractAllTermsReport.startExport=Starting Unique Word Extraction",
        "ExtractAllTermsReport.export.error=Error During Unique Word Extraction",
        "ExtractAllTermsReport.exportComplete=Unique Word Extraction Complete"
    })
    @Override
    public void generateReport(GeneralReportSettings settings, ReportProgressPanel progressPanel) {

        if (!Case.isCaseOpen()) {
            logger.log(Level.SEVERE, "No open case when attempting to run {0} report", Bundle.ExtractAllTermsReport_getName_text()); //NON-NLS
            progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, Bundle.ExtractAllTermsReport_error_noOpenCase());
            return;
        }

        progressPanel.setIndeterminate(true);
        progressPanel.start();
        progressPanel.updateStatusLabel("Extracting unique words...");

        boolean isIngestRunning = IngestManager.getInstance().isIngestRunning();

        int filesIndexed = 0;
        try { // see if there are any indexed files
            filesIndexed = KeywordSearch.getServer().queryNumIndexedFiles();
        } catch (KeywordSearchModuleException | NoOpenCoreException ignored) {
        }

        if (filesIndexed == 0) {
            if (isIngestRunning) {
                progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, Bundle.ExtractAllTermsReport_search_noFilesInIdxMsg(KeywordSearchSettings.getUpdateFrequency().getTime()));
            } else {
                progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, Bundle.ExtractAllTermsReport_search_noFilesInIdxMsg2());
            }
            progressPanel.setIndeterminate(false);
            return;
        }

        // check if keyword search module ingest is running (indexing, etc)
        if (isIngestRunning) {
            if (KeywordSearchUtil.displayConfirmDialog(Bundle.ExtractAllTermsReport_search_searchIngestInProgressTitle(),
                    Bundle.ExtractAllTermsReport_search_ingestInProgressBody(), KeywordSearchUtil.DIALOG_MESSAGE_TYPE.WARN) == false) {
                progressPanel.cancel();
                return;
            }
        }

        final Server server = KeywordSearch.getServer();
        try {
            progressPanel.updateStatusLabel(Bundle.ExtractAllTermsReport_startExport());
            Path outputFile = Paths.get(settings.getReportDirectoryPath(), getRelativeFilePath());
            server.extractAllTermsForDataSource(outputFile, progressPanel);
        } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
            logger.log(Level.SEVERE, "Exception while extracting unique terms", ex); //NON-NLS
            progressPanel.setIndeterminate(false);
            progressPanel.complete(ReportProgressPanel.ReportStatus.ERROR, Bundle.ExtractAllTermsReport_export_error());
            return;
        }

        progressPanel.setIndeterminate(false);
        progressPanel.complete(ReportProgressPanel.ReportStatus.COMPLETE, Bundle.ExtractAllTermsReport_exportComplete());
    }

    @Override
    public boolean supportsDataSourceSelection() {
        return false;
    }

    @NbBundle.Messages({
        "ExtractAllTermsReport.description.text=Extracts all unique words out of the current case. NOTE: The extracted words are lower-cased."})
    @Override
    public String getDescription() {
        return Bundle.ExtractAllTermsReport_description_text();
    }

    @Override
    public String getRelativeFilePath() {
        return OUTPUT_FILE_NAME;
    }

}
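ExtractAllTermsReport registers itself through @ServiceProvider, so the reporting infrastructure can discover it on the default Lookup without explicit wiring. A minimal sketch of that discovery step (illustrative only; the sketch class is not part of this commit):

import java.util.Collection;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.report.GeneralReportModule;

class ReportModuleDiscoverySketch {

    // Enumerates every implementation annotated with
    // @ServiceProvider(service = GeneralReportModule.class), which now
    // includes the ExtractAllTermsReport added above.
    static Collection<? extends GeneralReportModule> allGeneralReportModules() {
        return Lookup.getDefault().lookupAll(GeneralReportModule.class);
    }
}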
@ -21,10 +21,8 @@ package org.sleuthkit.autopsy.keywordsearch;
import java.io.File;
import java.nio.file.Paths;
import java.util.List;
import java.util.logging.Level;
import org.apache.commons.lang.math.NumberUtils;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.appservices.AutopsyService;

/**
@ -32,7 +30,6 @@ import org.sleuthkit.autopsy.appservices.AutopsyService;
 */
class IndexFinder {

    private static final Logger logger = Logger.getLogger(IndexFinder.class.getName());
    private static final String KWS_OUTPUT_FOLDER_NAME = "keywordsearch";
    private static final String KWS_DATA_FOLDER_NAME = "data";
    private static final String INDEX_FOLDER_NAME = "index";
@ -48,7 +45,7 @@ class IndexFinder {
        return CURRENT_SOLR_SCHEMA_VERSION;
    }

    static Index findLatestVersionIndexDir(List<Index> allIndexes) {
    static Index findLatestVersionIndex(List<Index> allIndexes) {
        for (Index index : allIndexes) {
            if (index.getSolrVersion().equals(CURRENT_SOLR_VERSION) && index.getSchemaVersion().equals(CURRENT_SOLR_SCHEMA_VERSION)) {
                return index;
@ -57,7 +54,7 @@ class IndexFinder {
        return null;
    }

    static Index createLatestVersionIndexDir(Case theCase) throws AutopsyService.AutopsyServiceException {
    static Index createLatestVersionIndex(Case theCase) throws AutopsyService.AutopsyServiceException {
        String indexFolderName = "solr" + CURRENT_SOLR_VERSION + "_schema" + CURRENT_SOLR_SCHEMA_VERSION;
        // new index should be stored in "\ModuleOutput\keywordsearch\data\solrX_schemaY\index"
        File targetDirPath = Paths.get(theCase.getModuleDirectory(), KWS_OUTPUT_FOLDER_NAME, KWS_DATA_FOLDER_NAME, indexFolderName, INDEX_FOLDER_NAME).toFile(); //NON-NLS
@ -36,6 +36,7 @@ import java.net.ServerSocket;
import java.net.SocketException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.OpenOption;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import java.nio.file.Path;
import java.nio.file.Paths;
@ -52,6 +53,7 @@ import java.util.logging.Level;
import javax.swing.AbstractAction;
import org.apache.commons.io.FileUtils;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toList;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
@ -66,8 +68,10 @@ import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.response.CoreAdminResponse;
import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.TermsResponse;
import org.apache.solr.client.solrj.response.TermsResponse.Term;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
@ -80,6 +84,7 @@ import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.Case.CaseType;
import org.sleuthkit.autopsy.casemodule.CaseMetadata;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
@ -90,6 +95,8 @@ import org.sleuthkit.autopsy.coreutils.ThreadUtils;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
import org.sleuthkit.autopsy.report.GeneralReportSettings;
import org.sleuthkit.autopsy.report.ReportProgressPanel;
import org.sleuthkit.datamodel.Content;

/**
@ -1785,6 +1792,34 @@ public class Server {
            currentCoreLock.writeLock().unlock();
        }
    }

    /**
     * Extract all unique terms/words from current index.
     *
     * @param outputFile    Absolute path to the output file
     * @param progressPanel ReportProgressPanel to update
     *
     * @throws NoOpenCoreException
     */
    @NbBundle.Messages({
        "Server.getAllTerms.error=Extraction of all unique Solr terms failed:"})
    void extractAllTermsForDataSource(Path outputFile, ReportProgressPanel progressPanel) throws KeywordSearchModuleException, NoOpenCoreException {
        try {
            currentCoreLock.writeLock().lock();
            if (null == currentCollection) {
                throw new NoOpenCoreException();
            }
            try {
                currentCollection.extractAllTermsForDataSource(outputFile, progressPanel);
            } catch (Exception ex) {
                // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
                logger.log(Level.SEVERE, "Extraction of all unique Solr terms failed: ", ex); //NON-NLS
                throw new KeywordSearchModuleException(Bundle.Server_getAllTerms_error(), ex);
            }
        } finally {
            currentCoreLock.writeLock().unlock();
        }
    }

    /**
     * Get the text contents of the given file as stored in SOLR.
@ -2132,6 +2167,71 @@

            queryClient.deleteByQuery(deleteQuery);
        }

        /**
         * Extract all unique terms/words from current index. Gets 1,000 terms at a time and
         * writes them to output file. Updates ReportProgressPanel status.
         *
         * @param outputFile    Absolute path to the output file
         * @param progressPanel ReportProgressPanel to update
         * @throws IOException
         * @throws SolrServerException
         * @throws NoCurrentCaseException
         * @throws KeywordSearchModuleException
         */
        @NbBundle.Messages({
            "# {0} - Number of extracted terms",
            "ExtractAllTermsReport.numberExtractedTerms=Extracted {0} terms..."
        })
        private void extractAllTermsForDataSource(Path outputFile, ReportProgressPanel progressPanel) throws IOException, SolrServerException, NoCurrentCaseException, KeywordSearchModuleException {

            Files.deleteIfExists(outputFile);
            OpenOption[] options = new OpenOption[] { java.nio.file.StandardOpenOption.CREATE, java.nio.file.StandardOpenOption.APPEND };

            // step through the terms
            int termStep = 1000;
            long numExtractedTerms = 0;
            String firstTerm = "";
            while (true) {
                SolrQuery query = new SolrQuery();
                query.setRequestHandler("/terms");
                query.setTerms(true);
                query.setTermsLimit(termStep);
                query.setTermsLower(firstTerm);
                query.setTermsLowerInclusive(false);

                // Returned terms sorted by "index" order, which is the fastest way. Per Solr documentation:
                // "Retrieving terms in index order is very fast since the implementation directly uses Lucene’s TermEnum to iterate over the term dictionary."
                // All other sort criteria return very inconsistent and overlapping results.
                query.setTermsSortString("index");

                // "text" field is the schema field that we populate with (lowercased) terms
                query.addTermsField(Server.Schema.TEXT.toString());
                query.setTermsMinCount(0);

                // Unfortunately Solr "terms queries" do not support any filtering so we can't filter by data source this way.
                // query.addFilterQuery(Server.Schema.IMAGE_ID.toString() + ":" + dataSourceId);

                QueryRequest request = new QueryRequest(query);
                TermsResponse response = request.process(queryClient).getTermsResponse();
                List<Term> terms = response.getTerms(Server.Schema.TEXT.toString());

                if (terms == null || terms.isEmpty()) {
                    numExtractedTerms += terms.size();
                    progressPanel.updateStatusLabel(Bundle.ExtractAllTermsReport_numberExtractedTerms(numExtractedTerms));
                    break;
                }

                // set the first term for the next query
                firstTerm = terms.get(terms.size()-1).getTerm();

                List<String> listTerms = terms.stream().map(Term::getTerm).collect(Collectors.toList());
                Files.write(outputFile, listTerms, options);

                numExtractedTerms += termStep;
                progressPanel.updateStatusLabel(Bundle.ExtractAllTermsReport_numberExtractedTerms(numExtractedTerms));
            }
        }
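The method above pages through the Solr terms component in index order, using the last term of each batch as the exclusive lower bound of the next request. For reference, a standalone sketch of the same cursoring pattern (illustrative only, not code from this commit; the Solr URL, field name, and batch size are placeholders):

import java.io.IOException;
import java.util.List;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.TermsResponse;
import org.apache.solr.client.solrj.response.TermsResponse.Term;

class TermsPaginationSketch {

    // Walks the term dictionary of the given field in index order, 1000 terms
    // per request, feeding the last term of each page back as the next
    // exclusive lower bound.
    static void dumpTerms(String solrUrl, String field) throws SolrServerException, IOException {
        try (SolrClient client = new HttpSolrClient.Builder(solrUrl).build()) {
            String lower = "";
            while (true) {
                SolrQuery query = new SolrQuery();
                query.setRequestHandler("/terms");
                query.setTerms(true);
                query.setTermsLimit(1000);
                query.setTermsLower(lower);
                query.setTermsLowerInclusive(false);
                query.setTermsSortString("index"); // index order is the only stable, fast option
                query.addTermsField(field);
                query.setTermsMinCount(0);

                TermsResponse response = new QueryRequest(query).process(client).getTermsResponse();
                List<Term> terms = response.getTerms(field);
                if (terms == null || terms.isEmpty()) {
                    break; // term dictionary exhausted
                }
                terms.forEach(t -> System.out.println(t.getTerm()));
                lower = terms.get(terms.size() - 1).getTerm();
            }
        }
    }
}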

        /**
         * Add a Solr document for indexing. Documents get batched instead of
@ -2168,7 +2268,6 @@
         *
         * @throws KeywordSearchModuleException
         */
        // ELTODO DECIDE ON SYNCHRONIZATION
        private void sendBufferedDocs(List<SolrInputDocument> docBuffer) throws KeywordSearchModuleException {

            if (docBuffer.isEmpty()) {
@ -309,14 +309,14 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService {
            // new case that doesn't have an existing index. create new index folder
            progressUnitsCompleted++;
            progress.progress(Bundle.SolrSearch_creatingNewIndex_msg(), progressUnitsCompleted);
            currentVersionIndex = IndexFinder.createLatestVersionIndexDir(theCase);
            currentVersionIndex = IndexFinder.createLatestVersionIndex(theCase);
            // add current index to the list of indexes that exist for this case
            indexes.add(currentVersionIndex);
        } else {
            // check if one of the existing indexes is for latest Solr version and schema
            progressUnitsCompleted++;
            progress.progress(Bundle.SolrSearch_checkingForLatestIndex_msg(), progressUnitsCompleted);
            currentVersionIndex = IndexFinder.findLatestVersionIndexDir(indexes);
            currentVersionIndex = IndexFinder.findLatestVersionIndex(indexes);
            if (currentVersionIndex == null) {
                // found existing index(es) but none were for latest Solr version and schema version
                progressUnitsCompleted++;
@ -2315,8 +2315,9 @@ class ExtractRegistry extends Extract {
        // "Default Admin User", "Custom Limited Acct"
        // and "Default Guest Acct"
        value = userInfo.get(ACCOUNT_TYPE_KEY);
        if (value != null && !value.isEmpty()) {
            osAccount.setIsAdmin(value.toLowerCase().contains("Admin"));
        if (value != null && !value.isEmpty() && value.toLowerCase().contains("admin")) {
            attributes.add(createOsAccountAttribute(ATTRIBUTE_TYPE.TSK_IS_ADMIN,
                    1, osAccount, host, regFile));
        }

        value = userInfo.get(USER_COMMENT_KEY);
@ -329,7 +329,8 @@ class TskDbDiff(object):
        id_legacy_artifact_types = build_id_legacy_artifact_types_table(conn.cursor(), isMultiUser)
        id_reports_table = build_id_reports_table(conn.cursor(), isMultiUser)
        id_images_table = build_id_image_names_table(conn.cursor(), isMultiUser)
        id_obj_path_table = build_id_obj_path_table(id_files_table, id_objects_table, id_artifact_types_table, id_reports_table, id_images_table)
        id_accounts_table = build_id_accounts_table(conn.cursor(), isMultiUser)
        id_obj_path_table = build_id_obj_path_table(id_files_table, id_objects_table, id_artifact_types_table, id_reports_table, id_images_table, id_accounts_table)

        if isMultiUser: # Use PostgreSQL
            os.environ['PGPASSWORD']=pgSettings.password
@ -352,7 +353,7 @@ class TskDbDiff(object):
                if 'INSERT INTO image_gallery_groups_seen' in dump_line:
                    dump_line = ''
                    continue;
                dump_line = normalize_db_entry(dump_line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table, id_legacy_artifact_types)
                dump_line = normalize_db_entry(dump_line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table, id_legacy_artifact_types, id_accounts_table)
                db_log.write('%s\n' % dump_line)
                dump_line = ''
            postgreSQL_db.close()
@ -366,7 +367,7 @@ class TskDbDiff(object):
            for line in conn.iterdump():
                if 'INSERT INTO "image_gallery_groups_seen"' in line:
                    continue
                line = normalize_db_entry(line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table, id_legacy_artifact_types)
                line = normalize_db_entry(line, id_obj_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table, id_reports_table, id_images_table, id_legacy_artifact_types, id_accounts_table)
                db_log.write('%s\n' % line)
        # Now sort the file
        srtcmdlst = ["sort", dump_file, "-o", dump_file]
@ -419,7 +420,7 @@ class PGSettings(object):
        return self.password


def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info_table, objects_table, reports_table, images_table, artifact_table):
def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info_table, objects_table, reports_table, images_table, artifact_table, accounts_table):
    """ Make testing more consistent and reasonable by doctoring certain db entries.

    Args:
@ -442,6 +443,7 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
    ig_groups_index = line.find('INSERT INTO "image_gallery_groups"') > -1 or line.find('INSERT INTO image_gallery_groups ') > -1
    ig_groups_seen_index = line.find('INSERT INTO "image_gallery_groups_seen"') > -1 or line.find('INSERT INTO image_gallery_groups_seen ') > -1
    os_account_index = line.find('INSERT INTO "tsk_os_accounts"') > -1 or line.find('INSERT INTO tsk_os_accounts') > -1
    os_account_attr_index = line.find('INSERT INTO "tsk_os_account_attributes"') > -1 or line.find('INSERT INTO tsk_os_account_attributes') > -1

    parens = line[line.find('(') + 1 : line.rfind(')')]
    no_space_parens = parens.replace(" ", "")
@ -569,6 +571,8 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
            parent_path = fs_info_table[parent_id]
        elif parent_id in images_table.keys():
            parent_path = images_table[parent_id]
        elif parent_id in accounts_table.keys():
            parent_path = accounts_table[parent_id]
        elif parent_id == 'NULL':
            parent_path = "NULL"

@ -636,7 +640,29 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
        newLine = ('INSERT INTO "tsk_event_descriptions" VALUES(' + ','.join(fields_list[1:]) + ');') # remove report_id
        return newLine
    elif os_account_index:
        newLine = ('INSERT INTO "tsk_os_accounts" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id
        newLine = ('INSERT INTO "tsk_os_accounts" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id since value that would be substituted is in diff line already
        return newLine
    elif os_account_attr_index:
        # substitute the account object id for a non-changing value
        os_account_id = int(fields_list[1])
        fields_list[1] = accounts_table[os_account_id]
        # substitute the source object id for a non-changing value
        source_obj_id = int(fields_list[3])
        if source_obj_id in files_table.keys():
            fields_list[3] = files_table[source_obj_id]
        elif source_obj_id in vs_parts_table.keys():
            fields_list[3] = vs_parts_table[source_obj_id]
        elif source_obj_id in vs_info_table.keys():
            fields_list[3] = vs_info_table[source_obj_id]
        elif source_obj_id in fs_info_table.keys():
            fields_list[3] = fs_info_table[source_obj_id]
        elif source_obj_id in images_table.keys():
            fields_list[3] = images_table[source_obj_id]
        elif source_obj_id in accounts_table.keys():
            fields_list[3] = accounts_table[source_obj_id]
        elif source_obj_id == 'NULL':
            fields_list[3] = "NULL"
        newLine = ('INSERT INTO "tsk_os_account_attributes" VALUES(' + ','.join(fields_list[1:]) + ');') # remove id
        return newLine
    else:
        return line
@ -758,8 +784,18 @@ def build_id_reports_table(db_cursor, isPostgreSQL):
    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT obj_id, path FROM reports")])
    return mapping

def build_id_accounts_table(db_cursor, isPostgreSQL):
    """Build the map of object ids to OS account SIDs.

    Args:
        db_cursor: the database cursor
    """
    # for each row in the db, takes the object id and account SID and creates an entry in the dictionary
    # with the object id as the key and the OS Account's SID as the value
    mapping = dict([(row[0], row[1]) for row in sql_select_execute(db_cursor, isPostgreSQL, "SELECT os_account_obj_id, unique_id FROM tsk_os_accounts")])
    return mapping

def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table):
def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports_table, images_table, accounts_table):
    """Build the map of object ids to artifact ids.

    Args:
@ -767,6 +803,8 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
        objects_table: obj_id, par_obj_id, type
        artifacts_table: obj_id, artifact_type_name
        reports_table: obj_id, path
        images_table: obj_id, name
        accounts_table: obj_id, unique_id
    """
    # make a copy of files_table and update it with new data from artifacts_table and reports_table
    mapping = files_table.copy()
@ -786,6 +824,8 @@ def build_id_obj_path_table(files_table, objects_table, artifacts_table, reports
            elif par_obj_id in images_table.keys():
                path = images_table[par_obj_id]
            mapping[k] = path + "/" + artifacts_table[k]
        elif k in accounts_table.keys(): # For an OS Account object ID we use its unique_id field which is the account SID
            mapping[k] = accounts_table[k]
        elif v[0] not in mapping.keys():
            if v[0] in artifacts_table.keys():
                par_obj_id = objects_table[v[0]]