Merge branch 'release-4.13.0' of https://github.com/sleuthkit/autopsy into 5492-refactor-appdbparserhelper

commit 4af45b522f
@@ -70,7 +70,7 @@ final class ContactNode extends BlackboardArtifactNode {

     @Override
     protected Sheet createSheet() {
-        Sheet sheet = super.createSheet();
+        Sheet sheet = new Sheet();

         final BlackboardArtifact artifact = getArtifact();
         BlackboardArtifact.ARTIFACT_TYPE fromID = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifact.getArtifactTypeID());
@@ -116,10 +116,6 @@ final class ContactNode extends BlackboardArtifactNode {
                 sheetSet.put(new NodeProperty<>(bba.getAttributeType().getTypeName(), bba.getAttributeType().getDisplayName(), "", bba.getDisplayString()));
             }

-            // Don't need these values to appear in the Contact property sheet.
-            sheetSet.remove("S");
-            sheetSet.remove("C");
-
             List<Content> children = artifact.getChildren();
             if(children != null) {
                 int count = 0;
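
Note on the ContactNode hunks above: super.createSheet() pre-populates the sheet with the default artifact-node properties (which is why the old code then had to strip the "S" and "C" entries by key), so starting from an empty Sheet makes the sheetSet.remove("S") / sheetSet.remove("C") calls in the second hunk unnecessary. A minimal sketch of the NetBeans Nodes pattern, assuming only the standard org.openide.nodes API:

    import org.openide.nodes.Sheet;

    // Sketch: an empty Sheet starts with no inherited properties, so the
    // node adds exactly the rows it wants and removes nothing afterwards.
    Sheet sheet = new Sheet();
    Sheet.Set sheetSet = Sheet.createPropertiesSet();
    sheet.put(sheetSet);
    // sheetSet.put(new NodeProperty<>(...)); // populate as in the hunk above
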
@@ -117,7 +117,7 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer
         }

         AbstractFile file = selectedNode.getLookup().lookup(AbstractFile.class);
-        if (file == null) {
+        if ((file == null) || (file.isDir())) {
             return;
         }

@@ -189,7 +189,7 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer
         }

         AbstractFile aFile = node.getLookup().lookup(AbstractFile.class);
-        if (aFile == null) {
+        if ((aFile == null) || (aFile.isDir())) {
             return false;
         }

@@ -892,6 +892,8 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
         "DataResultViewerTable.goToPageTextField.msgDlg=Please enter a valid page number between 1 and {0}",
         "DataResultViewerTable.goToPageTextField.err=Invalid page number"})
     void gotoPage() {
+        int originalPage = currentPage;
+
         try {
             currentPage = Integer.decode(gotoPageTextField.getText());
         } catch (NumberFormatException e) {
@@ -900,7 +902,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
         }

         if (currentPage > totalPages || currentPage < 1) {
-            currentPage = 1;
+            currentPage = originalPage;
             JOptionPane.showMessageDialog(DataResultViewerTable.this,
                     Bundle.DataResultViewerTable_goToPageTextField_msgDlg(totalPages),
                     Bundle.DataResultViewerTable_goToPageTextField_err(),
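
Note: the gotoPage() change fixes the failure mode, not just the message. An out-of-range or unparseable page number used to reset pagination to page 1; saving originalPage first lets the viewer restore the page the user was already on before showing the error dialog. A standalone sketch of the validate-and-revert pattern (hypothetical names, not the Autopsy API):

    // Returns the page to display after the user submits `input`.
    static int resolvePage(int currentPage, int totalPages, String input) {
        int originalPage = currentPage;  // remember where the user was
        int requested;
        try {
            requested = Integer.decode(input);
        } catch (NumberFormatException e) {
            return originalPage;         // unparseable: stay on the same page
        }
        if (requested < 1 || requested > totalPages) {
            return originalPage;         // out of range: stay put and warn
        }
        return requested;
    }
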
@@ -110,10 +110,12 @@ public class ArtifactSelectionDialog extends javax.swing.JDialog {
             @Override
             public void mousePressed(MouseEvent evt) {
                 int index = artifactList.locationToIndex(evt.getPoint());
+                if (index >= 0) {
                 BlackboardArtifact.Type type = model.getElementAt(index);
                 artifactTypeSelections.put(type, !artifactTypeSelections.get(type));
                 artifactList.repaint();
                 }
+                }
             });
         }

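
Note: javax.swing.JList.locationToIndex(Point) returns the index of the cell closest to the point, and -1 when the model is empty, so a mouse press on an empty list previously reached model.getElementAt(-1). The new guard rejects the negative index before touching the model; a condensed sketch:

    // Inside a MouseAdapter installed on a JList backed by `model`:
    int index = artifactList.locationToIndex(evt.getPoint());
    if (index >= 0) { // -1 => empty model, nothing to toggle
        BlackboardArtifact.Type type = model.getElementAt(index);
        // ...toggle the selection for `type`, then artifactList.repaint();
    }
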
@@ -35,9 +35,7 @@ import javafx.beans.InvalidationListener;
 import javafx.beans.property.ReadOnlyObjectProperty;
 import javafx.beans.property.ReadOnlyObjectWrapper;
 import javafx.collections.FXCollections;
-import javafx.collections.ObservableList;
 import javafx.collections.ObservableMap;
-import javafx.collections.ObservableSet;
 import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
 import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
 import org.joda.time.DateTimeZone;
@@ -58,7 +56,6 @@ import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
 import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
 import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
-import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
 import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
 import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
 import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
@@ -70,7 +67,6 @@ import org.sleuthkit.datamodel.ContentTag;
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.Tag;
-import org.sleuthkit.datamodel.TagName;
 import org.sleuthkit.datamodel.TimelineManager;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TimelineEvent;
@@ -81,10 +77,8 @@ import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
 import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
 import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
 import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
-import org.sleuthkit.datamodel.TimelineFilter.HashSetFilter;
 import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
 import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
-import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
 import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
 import org.sleuthkit.datamodel.TimelineFilter.TextFilter;

@@ -129,8 +123,6 @@ public final class FilteredEventsModel {
     private final LoadingCache<ZoomState, Map<TimelineEventType, Long>> eventCountsCache;
     /** Map from datasource id to datasource name. */
     private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
-    private final ObservableSet< String> hashSets = FXCollections.observableSet();
-    private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
     // end caches

     /**
@@ -171,8 +163,6 @@ public final class FilteredEventsModel {
         };

         datasourcesMap.addListener(filterSyncListener);
-        hashSets.addListener(filterSyncListener);
-        tagNames.addListener(filterSyncListener);

         requestedFilter.set(getDefaultFilter());

@@ -248,15 +238,11 @@ public final class FilteredEventsModel {
     */
    synchronized private void populateFilterData() throws TskCoreException {
        SleuthkitCase skCase = autoCase.getSleuthkitCase();
-        hashSets.addAll(eventManager.getHashSetNames());
-
        //because there is no way to remove a datasource we only add to this map.
        for (DataSource ds : skCase.getDataSources()) {
            datasourcesMap.putIfAbsent(ds.getId(), ds.getName());
        }

-        //should this only be tags applied to files or event bearing artifacts?
-        tagNames.setAll(skCase.getTagNamesInUse());
    }

    /**
@@ -269,22 +255,8 @@ public final class FilteredEventsModel {
     * with the tags in use in the case
     */
    public void syncFilters(RootFilterState rootFilterState) {
-        TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
-        for (TagName tagName : tagNames) {
-            tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
-        }
-        for (FilterState<? extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
-            // disable states for tag names that don't exist in case.
-            tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
-        }
-
        DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
        datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry)));
-
-        HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
-        for (String hashSet : hashSets) {
-            hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
-        }
    }

    /**
@@ -351,10 +323,8 @@ public final class FilteredEventsModel {
                 -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry)));

         HashHitsFilter hashHitsFilter = new HashHitsFilter();
-        hashSets.stream().map(HashSetFilter::new).forEach(hashHitsFilter::addSubFilter);

         TagsFilter tagsFilter = new TagsFilter();
-        tagNames.stream().map(TagNameFilter::new).forEach(tagsFilter::addSubFilter);

         FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();

@@ -388,20 +358,6 @@ public final class FilteredEventsModel {
         return events;
     }

-    /**
-     * get a count of tagnames applied to the given event ids as a map from
-     * tagname displayname to count of tag applications
-     *
-     * @param eventIDsWithTags the event ids to get the tag counts map for
-     *
-     * @return a map from tagname displayname to count of applications
-     *
-     * @throws org.sleuthkit.datamodel.TskCoreException
-     */
-    public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) throws TskCoreException {
-        return eventManager.getTagCountsByTagName(eventIDsWithTags);
-    }
-
     public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filter) throws TskCoreException {

         final Interval overlap;
@@ -24,15 +24,12 @@ import com.google.common.collect.Sets;
 import com.google.common.eventbus.Subscribe;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.logging.Level;
-import java.util.stream.Collectors;
 import javafx.animation.KeyFrame;
 import javafx.animation.KeyValue;
 import javafx.animation.Timeline;
@@ -82,9 +79,7 @@ import static org.sleuthkit.autopsy.timeline.ui.detailview.EventNodeBase.show;
 import static org.sleuthkit.autopsy.timeline.ui.detailview.MultiEventNodeBase.CORNER_RADII_3;
 import org.sleuthkit.autopsy.timeline.ui.detailview.datamodel.DetailViewEvent;
 import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TimelineEventType;
-import org.sleuthkit.datamodel.TimelineEvent;

 /**
  *
@@ -269,7 +264,7 @@ public abstract class EventNodeBase<Type extends DetailViewEvent> extends StackP
     }

     /**
-     * defer tooltip content creation till needed, this had a surprisingly large
+     * defer tooltip content creation until needed, this had a surprisingly large
      * impact on speed of loading the chart
      */
     @NbBundle.Messages({"# {0} - counts",
@@ -293,37 +288,9 @@ public abstract class EventNodeBase<Type extends DetailViewEvent> extends StackP

         @Override
         protected String call() throws Exception {
-            HashMap<String, Long> hashSetCounts = new HashMap<>();
-            if (tlEvent.getEventIDsWithHashHits().isEmpty() == false) {
-                try {
-                    //TODO:push this to DB
-                    for (TimelineEvent tle : eventsModel.getEventsById(tlEvent.getEventIDsWithHashHits())) {
-                        Set<String> hashSetNames = sleuthkitCase.getContentById(tle.getFileObjID()).getHashSetNames();
-                        for (String hashSetName : hashSetNames) {
-                            hashSetCounts.merge(hashSetName, 1L, Long::sum);
-                        }
-                    }
-                } catch (TskCoreException ex) {
-                    LOGGER.log(Level.SEVERE, "Error getting hashset hit info for event.", ex); //NON-NLS
-                }
-            }
-            String hashSetCountsString = hashSetCounts.entrySet().stream()
-                    .map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
-                    .collect(Collectors.joining("\n"));
-
-            Map<String, Long> tagCounts = new HashMap<>();
-            if (tlEvent.getEventIDsWithTags().isEmpty() == false) {
-                tagCounts.putAll(eventsModel.getTagCountsByTagName(tlEvent.getEventIDsWithTags()));
-            }
-            String tagCountsString = tagCounts.entrySet().stream()
-                    .map((Map.Entry<String, Long> t) -> t.getKey() + " : " + t.getValue())
-                    .collect(Collectors.joining("\n"));
-
             return Bundle.EventNodeBase_tooltip_text(getEventIDs().size(), getEventType(), getDescription(),
                     TimeLineController.getZonedFormatter().print(getStartMillis()),
-                    TimeLineController.getZonedFormatter().print(getEndMillis() + 1000))
-                    + (hashSetCountsString.isEmpty() ? "" : Bundle.EventNodeBase_toolTip_hashSetHits(hashSetCountsString))
-                    + (tagCountsString.isEmpty() ? "" : Bundle.EventNodeBase_toolTip_tags(tagCountsString));
+                    TimeLineController.getZonedFormatter().print(getEndMillis() + 1000));
         }

         @Override
@@ -35,10 +35,10 @@ import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
 import org.sleuthkit.datamodel.TimelineFilter.FileTypeFilter;
 import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
 import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
-import org.sleuthkit.datamodel.TimelineFilter.HashSetFilter;
 import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
 import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
 import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
+import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;

 /** A FilterState for RootFilters. Provides named access to the sub
  * filter states.
@@ -48,8 +48,8 @@ public class RootFilterState extends CompoundFilterState<TimelineFilter, RootFil
     private final CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState;
     private final SqlFilterState<HideKnownFilter> knownFilterState;
     private final SqlFilterState<TextFilter> textFilterState;
-    private final TagsFilterState tagsFilterState;
-    private final CompoundFilterState<HashSetFilter, HashHitsFilter> hashHitsFilterState;
+    private final SqlFilterState<TagsFilter> tagsFilterState;
+    private final SqlFilterState<HashHitsFilter> hashHitsFilterState;
     private final CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState;
     private final CompoundFilterState<TimelineFilter.FileTypeFilter, TimelineFilter.FileTypesFilter> fileTypesFilterState;

@@ -63,8 +63,8 @@ public class RootFilterState extends CompoundFilterState<TimelineFilter, RootFil
                 new CompoundFilterState<>(delegate.getEventTypeFilter()),
                 new SqlFilterState<>(delegate.getKnownFilter()),
                 new SqlFilterState<>(delegate.getTextFilter()),
-                new TagsFilterState(delegate.getTagsFilter()),
-                new CompoundFilterState<>(delegate.getHashHitsFilter()),
+                new SqlFilterState<>(delegate.getTagsFilter()),
+                new SqlFilterState<>(delegate.getHashHitsFilter()),
                 new CompoundFilterState<>(delegate.getDataSourcesFilter()),
                 new CompoundFilterState<>(delegate.getFileTypesFilter())
         );
@@ -74,8 +74,8 @@ public class RootFilterState extends CompoundFilterState<TimelineFilter, RootFil
             CompoundFilterState<EventTypeFilter, EventTypeFilter> eventTypeFilterState,
             SqlFilterState<HideKnownFilter> knownFilterState,
             SqlFilterState<TextFilter> textFilterState,
-            TagsFilterState tagsFilterState,
-            CompoundFilterState<HashSetFilter, HashHitsFilter> hashHitsFilterState,
+            SqlFilterState<TagsFilter> tagsFilterState,
+            SqlFilterState<HashHitsFilter> hashHitsFilterState,
             CompoundFilterState<DataSourceFilter, DataSourcesFilter> dataSourcesFilterState,
             CompoundFilterState<FileTypeFilter, FileTypesFilter> fileTypesFilterState) {
         super(filter, Arrays.asList(eventTypeFilterState, knownFilterState, textFilterState, tagsFilterState, hashHitsFilterState, dataSourcesFilterState, fileTypesFilterState));
@@ -133,11 +133,11 @@ public class RootFilterState extends CompoundFilterState<TimelineFilter, RootFil
         return textFilterState;
     }

-    public TagsFilterState getTagsFilterState() {
+    public SqlFilterState<TagsFilter> getTagsFilterState() {
         return tagsFilterState;
     }

-    public CompoundFilterState<HashSetFilter, HashHitsFilter> getHashHitsFilterState() {
+    public SqlFilterState<HashHitsFilter> getHashHitsFilterState() {
         return hashHitsFilterState;
     }

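
Note: these signature changes are the core of the tags/hash-hits refactor. Both dimensions stop being compound filters carrying one sub-filter per tag name or hash set and become single SQL-backed toggles, which is also why hasActiveHashFilters() and hasActiveTagsFilters() disappear in the next hunk (there are no sub-filter states left to inspect). A hedged sketch of what the new leaf state amounts to, using only the constructor and setters visible elsewhere in this diff:

    // One filter, one on/off flag; no sub-filter list to keep in sync.
    SqlFilterState<TagsFilter> tagsState = new SqlFilterState<>(new TagsFilter());
    tagsState.setSelected(true); // turns the whole "tagged events" dimension on
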
@@ -161,18 +161,6 @@ public class RootFilterState extends CompoundFilterState<TimelineFilter, RootFil
                 Lists.transform(getSubFilterStates(), FilterState::getActiveFilter));
     }

-    @SuppressWarnings("rawtypes")
-    public boolean hasActiveHashFilters() {
-        return hashHitsFilterState.isActive()
-               && hashHitsFilterState.getSubFilterStates().stream().anyMatch(FilterState::isActive);
-    }
-
-    @SuppressWarnings("rawtypes")
-    public boolean hasActiveTagsFilters() {
-        return tagsFilterState.isActive()
-               && tagsFilterState.getSubFilterStates().stream().anyMatch(FilterState::isActive);
-    }
-
     @Override
     public ObservableList<FilterState<? extends TimelineFilter>> getSubFilterStates() {
         ImmutableMap<FilterState<? extends TimelineFilter>, Integer> filterOrder
@@ -36,6 +36,15 @@ public class SqlFilterState<FilterType extends TimelineFilter> extends AbstractF
         // the "Hide Known Filters", "Tags", "Hashsets" and "Text".
         // There are better ways to do this, but this works in a pinch
         this(filter, !(filter instanceof TimelineFilter.HideKnownFilter || filter instanceof TimelineFilter.TagsFilter || filter instanceof TimelineFilter.HashHitsFilter || filter instanceof TimelineFilter.TextFilter));
+
+        selectedProperty().addListener(selectedProperty -> {
+            if (filter instanceof TimelineFilter.TagsFilter) {
+                ((TimelineFilter.TagsFilter)filter).setTagged(isSelected());
+            } else if (filter instanceof TimelineFilter.HashHitsFilter) {
+                ((TimelineFilter.HashHitsFilter)filter).setTagged(isSelected());
+            }
+        });
+
     }

     /**
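
Note: the listener added to the SqlFilterState constructor is what bridges UI state to the query: since TagsFilter and HashHitsFilter no longer have sub-filters to carry selection, toggling the checkbox now pushes the state into the filter object itself via setTagged(). A minimal JavaFX sketch of the same property-listener pattern (illustrative only, not Autopsy code):

    import javafx.beans.property.SimpleBooleanProperty;

    SimpleBooleanProperty selected = new SimpleBooleanProperty(false);
    selected.addListener(observable ->
            System.out.println("push state into the filter: " + selected.get()));
    selected.set(true); // fires the listener once, printing "... true"
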

TagsFilterState.java (deleted file, 78 lines)
@@ -1,78 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2018-2019 Basis Technology Corp.
- * Contact: carrier <at> sleuthkit <dot> org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.timeline.ui.filtering.datamodel;
-
-import com.google.common.collect.Lists;
-import java.util.Collection;
-import javafx.collections.ListChangeListener;
-import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
-import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
-
-/**
- * Specialization of CompoundFilterState for TagName/Tags-Filter.
- *
- * Newly added subfilters made to be SELECTED when they are added.
- */
-public class TagsFilterState extends CompoundFilterState<TagNameFilter, TagsFilter> {
-
-    public TagsFilterState(TagsFilter delegate) {
-        super(delegate);
-        installSelectNewFiltersListener();
-
-    }
-
-    public TagsFilterState(TagsFilter delegate, Collection<FilterState<? extends TagNameFilter>> subFilterStates) {
-        super(delegate, subFilterStates);
-        installSelectNewFiltersListener();
-    }
-
-    private void installSelectNewFiltersListener() {
-        getSubFilterStates().addListener((ListChangeListener.Change<? extends FilterState<? extends TagNameFilter>> change) -> {
-            while (change.next()) {
-                change.getAddedSubList().forEach(filterState -> filterState.setSelected(true));
-            }
-        });
-    }
-
-    @Override
-    public TagsFilterState copyOf() {
-        TagsFilterState copy = new TagsFilterState(getFilter().copyOf(),
-                Lists.transform(getSubFilterStates(), FilterState::copyOf));
-
-        copy.setSelected(isSelected());
-        copy.setDisabled(isDisabled());
-        return copy;
-    }
-
-    @Override
-    public TagsFilter getActiveFilter() {
-        if (isActive() == false) {
-            return null;
-        }
-
-        TagsFilter copy = new TagsFilter();
-        //add active subfilters to copy.
-        getSubFilterStates().stream()
-                .filter(FilterState::isActive)
-                .map(FilterState::getActiveFilter)
-                .forEach(copy::addSubFilter);
-
-        return copy;
-    }
-}
@@ -51,6 +51,7 @@ ExtractOs.windowsVolume.label=OS Drive (Windows)
 ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog)
 ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog)
 ExtractOS_progressMessage=Checking for OS
+ExtractRecycleBin_module_name=Recycle Bin
 ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.
 ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files
 ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files

RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java (new executable file, 620 lines)
@@ -0,0 +1,620 @@
+/*
+ *
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2019 Basis Technology Corp.
+ *
+ * Copyright 2012 42six Solutions.
+ * Contact: aebadirad <at> 42six <dot> com
+ * Project Contact/Architect: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.recentactivity;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import org.joda.time.Instant;
+import org.openide.util.NbBundle.Messages;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.services.FileManager;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.datamodel.ContentUtils;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
+import org.sleuthkit.autopsy.ingest.IngestJobContext;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_ACCOUNT;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_DELETED;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_ID;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.FsContent;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.datamodel.TskDataException;
+
+/**
+ * This module is based on the RecycleBin python module from Mark McKinnon.
+ *
+ * @see
+ * <a href="https://github.com/markmckinnon/Autopsy-Plugins/blob/master/Recycle_Bin/Recycle_Bin.py">Recycle_Bin.py</a>
+ *
+ */
+final class ExtractRecycleBin extends Extract {
+
+    private static final Logger logger = Logger.getLogger(ExtractRecycleBin.class.getName());
+
+    private static final String RECYCLE_BIN_ARTIFACT_NAME = "TSK_RECYCLE_BIN"; //NON-NLS
+
+    private static final int V1_FILE_NAME_OFFSET = 24;
+    private static final int V2_FILE_NAME_OFFSET = 28;
+
+    @Messages({
+        "ExtractRecycleBin_module_name=Recycle Bin"
+    })
+    ExtractRecycleBin() {
+        this.moduleName = Bundle.ExtractRecycleBin_module_name();
+    }
+
+    @Override
+    void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
+        // At this time it was decided that we would not include TSK_RECYCLE_BIN
+        // in the default list of BlackboardArtifact types.
+        try {
+            createRecycleBinArtifactType();
+        } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, String.format("%s may not have been created.", RECYCLE_BIN_ARTIFACT_NAME), ex);
+        }
+
+        BlackboardArtifact.Type recycleBinArtifactType;
+
+        try {
+            recycleBinArtifactType = tskCase.getArtifactType(RECYCLE_BIN_ARTIFACT_NAME);
+        } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, String.format("Unable to retrive custom artifact type %s", RECYCLE_BIN_ARTIFACT_NAME), ex); // NON-NLS
+            // If this doesn't work bail.
+            return;
+        }
+
+        // map SIDs to user names so that we can include that in the artifact
+        Map<String, String> userNameMap;
+        try {
+            userNameMap = makeUserNameMap(dataSource);
+        } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, "Unable to create OS Account user name map", ex);
+            // This is not the end of the world we will just continue without
+            // user names
+            userNameMap = new HashMap<>();
+        }
+
+        FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
+
+        // Collect all of the $R files so that we can later easily map them to corresponding $I file
+        Map<String, List<AbstractFile>> rFileMap;
+        try {
+            rFileMap = makeRFileMap(dataSource);
+        } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, String.format("Unable to create $R file map for dataSource: %s", dataSource.getName()), ex);
+            return; // No $R files, no need to continue;
+        }
+
+        // Get the $I files
+        List<AbstractFile> iFiles;
+        try {
+            iFiles = fileManager.findFiles(dataSource, "$I%"); //NON-NLS
+        } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, "Unable to find recycle bin I files.", ex); //NON-NLS
+            return; // No need to continue
+        }
+
+        String tempRARecycleBinPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "recyclebin"); //NON-NLS
+
+        // cycle through the $I files and process each.
+        for (AbstractFile iFile : iFiles) {
+
+            if (context.dataSourceIngestIsCancelled()) {
+                return;
+            }
+
+            processIFile(context, recycleBinArtifactType, iFile, userNameMap, rFileMap, tempRARecycleBinPath);
+        }
+
+        (new File(tempRARecycleBinPath)).delete();
+    }
+
+    /**
+     * Process each individual iFile.
+     *
+     * @param context
+     * @param recycleBinArtifactType Module created artifact type
+     * @param iFile                  The AbstractFile to process
+     * @param userNameMap            Map of user ids to names
+     * @param tempRARecycleBinPath   Temp directory path
+     */
+    private void processIFile(IngestJobContext context, BlackboardArtifact.Type recycleBinArtifactType, AbstractFile iFile, Map<String, String> userNameMap, Map<String, List<AbstractFile>> rFileMap, String tempRARecycleBinPath) {
+        String tempFilePath = tempRARecycleBinPath + File.separator + Instant.now().getMillis() + iFile.getName();
+        try {
+            try {
+                ContentUtils.writeToFile(iFile, new File(tempFilePath));
+            } catch (IOException ex) {
+                logger.log(Level.WARNING, String.format("Unable to write %s to temp directory. File name: %s", iFile.getName(), tempFilePath), ex); //NON-NLS
+                // if we cannot make a copy of the $I file for later processing
+                // move onto the next file
+                return;
+            }
+
+            // get the original name, dates, etc. from the $I file
+            RecycledFileMetaData metaData;
+            try {
+                metaData = parseIFile(tempFilePath);
+            } catch (IOException ex) {
+                logger.log(Level.WARNING, String.format("Unable to parse iFile %s", iFile.getName()), ex); //NON-NLS
+                // Unable to parse the $I file move onto the next file
+                return;
+            }
+
+            // each user has its own Recyle Bin folder. Figure out the user name based on its name .
+            String userID = getUserIDFromPath(iFile.getParentPath());
+            String userName = "";
+            if (!userID.isEmpty()) {
+                userName = userNameMap.get(userID);
+            } else {
+                // If the iFile doesn't have a user ID in its parent
+                // directory structure then it is not from the recyle bin
+                return;
+            }
+
+            // get the corresponding $R file, which is in the same folder and has the file content
+            String rFileName = iFile.getName().replace("$I", "$R"); //NON-NLS
+            List<AbstractFile> rFiles = rFileMap.get(rFileName);
+            if (rFiles == null) {
+                return;
+            }
+            SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
+            for (AbstractFile rFile : rFiles) {
+                if (context.dataSourceIngestIsCancelled()) {
+                    return;
+                }
+
+                if (iFile.getParentPath().equals(rFile.getParentPath())
+                        && iFile.getMetaFlagsAsString().equals(rFile.getMetaFlagsAsString())) {
+                    try {
+                        postArtifact(createArtifact(rFile, recycleBinArtifactType, metaData.getFullWindowsPath(), userName, metaData.getDeletedTimeStamp()));
+
+                        // If we are processing a disk image, we will also make a deleted file entry so that the user
+                        // sees the deleted file in its original folder. We re-use the metadata address so that the user
+                        // can see the content.
+                        if (rFile instanceof FsContent) {
+                            // if the user deleted a folder, then we need to recusively go into it. Note the contents of the $R folder
+                            // do not have corresponding $I files anymore. Only the $R folder does.
+                            if (rFile.isDir()) {
+                                AbstractFile directory = getOrMakeFolder(Case.getCurrentCase().getSleuthkitCase(), (FsContent) rFile, metaData.getFullWindowsPath());
+                                popuplateDeletedDirectory(Case.getCurrentCase().getSleuthkitCase(), directory, rFile.getChildren(), metaData.getFullWindowsPath(), metaData.getDeletedTimeStamp());
+
+                            } else {
+                                AbstractFile folder = getOrMakeFolder(Case.getCurrentCase().getSleuthkitCase(), (FsContent) rFile.getParent(), Paths.get(metaData.getFullWindowsPath()).getParent().toString());
+                                addFileSystemFile(skCase, (FsContent)rFile, folder, Paths.get(metaData.getFullWindowsPath()).getFileName().toString(), metaData.getDeletedTimeStamp());
+                            }
+                        }
+                    } catch (TskCoreException ex) {
+                        logger.log(Level.WARNING, String.format("Unable to add attributes to artifact %s", rFile.getName()), ex); //NON-NLS
+                    }
+                }
+            }
+        } finally {
+            (new File(tempFilePath)).delete();
+        }
+    }
+
+    /**
+     * Add the children of recycled $R folder to the folder.
+     *
+     * @param skCase           The current Sleuthkit case
+     * @param parentFolder     The folder to folder the deleted files are to be
+     *                         added.
+     * @param children         The recycled children of the $R folder
+     * @param parentPath       String path to the directory the children were
+     *                         deleted from
+     * @param deletedTimeStamp The time at which the files were deleted,
+     *                         inherited from the $R file.
+     *
+     * @throws TskCoreException
+     */
+    private void popuplateDeletedDirectory(SleuthkitCase skCase, AbstractFile parentFolder, List<Content> recycledChildren, String parentPath, long deletedTimeStamp) throws TskCoreException {
+        if (recycledChildren == null) {
+            return;
+        }
+
+        for (Content child : recycledChildren) {
+            if (child instanceof FsContent) {
+                FsContent fsContent = (FsContent) child;
+                if (fsContent.isFile()) {
+                    addFileSystemFile(skCase, fsContent, parentFolder, fsContent.getName(), deletedTimeStamp);
+                } else if (fsContent.isDir()) {
+                    String newPath = parentPath + "\\" + fsContent.getName();
+                    AbstractFile childFolder = getOrMakeFolder(skCase, (FsContent) fsContent, parentPath);
+                    popuplateDeletedDirectory(skCase, childFolder, fsContent.getChildren(), newPath, deletedTimeStamp);
+                }
+            }
+        }
+    }
+
+    /**
+     * Parse the $I file.
+     *
+     * File format prior to Windows 10:
+     * <table>
+     * <tr><th>Offset</th><th>Size</th><th>Description</th></tr>
+     * <tr><td>0</td><td>8</td><td>Header</td></tr>
+     * <tr><td>8</td><td>8</td><td>File Size</td></tr>
+     * <tr><td>16</td><td>8</td><td>Deleted Timestamp</td></tr>
+     * <tr><td>24</td><td>520</td><td>File Name</td></tr>
+     * </table>
+     *
+     * File format Windows 10+
+     * <table>
+     * <tr><th>Offset</th><th>Size</th><th>Description</th></tr>
+     * <tr><td>0</td><td>8</td><td>Header</td></tr>
+     * <tr><td>8</td><td>8</td><td>File Size</td></tr>
+     * <tr><td>16</td><td>8</td><td>Deleted TimeStamp</td></tr>
+     * <tr><td>24</td><td>4</td><td>File Name Length</td></tr>
+     * <tr><td>28</td><td>var</td><td>File Name</td></tr>
+     * </table>
+     *
+     * For versions of Windows prior to 10, header = 0x01. Windows 10+ header ==
+     * 0x02
+     *
+     * @param iFilePath Path to local copy of file in temp folder
+     *
+     * @throws FileNotFoundException
+     * @throws IOException
+     */
+    private RecycledFileMetaData parseIFile(String iFilePath) throws FileNotFoundException, IOException {
+        byte[] allBytes = Files.readAllBytes(Paths.get(iFilePath));
+
+        ByteBuffer byteBuffer = ByteBuffer.wrap(allBytes);
+        byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
+
+        long version = byteBuffer.getLong();
+        long fileSize = byteBuffer.getLong();
+        long timestamp = byteBuffer.getLong();
+
+        // Convert from windows FILETIME to Unix Epoch seconds
+        timestamp = Util.filetimeToMillis(timestamp) / 1000;
+
+        byte[] stringBytes;
+
+        if (version == 1) {
+            stringBytes = Arrays.copyOfRange(allBytes, V1_FILE_NAME_OFFSET, allBytes.length);
+        } else {
+            int fileNameLength = byteBuffer.getInt() * 2; //Twice the bytes for unicode
+            stringBytes = Arrays.copyOfRange(allBytes, V2_FILE_NAME_OFFSET, V2_FILE_NAME_OFFSET + fileNameLength);
+        }
+
+        String fileName = new String(stringBytes, "UTF-16LE"); //NON-NLS
+
+        return new RecycledFileMetaData(fileSize, timestamp, fileName);
+    }
+
+    /**
+     * Create a map of userids to usernames from the OS Accounts.
+     *
+     * @param dataSource
+     *
+     * @return A Map of userIDs and userNames
+     *
+     * @throws TskCoreException
+     */
+    private Map<String, String> makeUserNameMap(Content dataSource) throws TskCoreException {
+        Map<String, String> userNameMap = new HashMap<>();
+
+        List<BlackboardArtifact> accounts = blackboard.getArtifacts(TSK_OS_ACCOUNT.getTypeID(), dataSource.getId());
+
+        for (BlackboardArtifact account : accounts) {
+            BlackboardAttribute nameAttribute = getAttributeForArtifact(account, TSK_USER_NAME);
+            BlackboardAttribute idAttribute = getAttributeForArtifact(account, TSK_USER_ID);
+
+            String userName = nameAttribute != null ? nameAttribute.getDisplayString() : "";
+            String userID = idAttribute != null ? idAttribute.getDisplayString() : "";
+
+            if (!userID.isEmpty()) {
+                userNameMap.put(userID, userName);
+            }
+        }
+
+        return userNameMap;
+    }
+
+    /**
+     * Get a list of files that start with $R and create a map of the file to
+     * their name.
+     *
+     * @param dataSource
+     *
+     * @return File map
+     *
+     * @throws TskCoreException
+     */
+    private Map<String, List<AbstractFile>> makeRFileMap(Content dataSource) throws TskCoreException {
+        FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
+        List<AbstractFile> rFiles = fileManager.findFiles(dataSource, "$R%");
+        Map<String, List<AbstractFile>> fileMap = new HashMap<>();
+
+        for (AbstractFile rFile : rFiles) {
+            String fileName = rFile.getName();
+            List<AbstractFile> fileList = fileMap.get(fileName);
+
+            if (fileList == null) {
+                fileList = new ArrayList<>();
+                fileMap.put(fileName, fileList);
+            }
+
+            fileList.add(rFile);
+        }
+
+        return fileMap;
+    }
+
+    /**
+     * Helper functions to get the user ID from the iFile parent path. User ids
+     * will be of the form S-<more characters>.
+     *
+     * @param iFileParentPath String parent path of the iFile
+     *
+     * @return String user id
+     */
+    private String getUserIDFromPath(String iFileParentPath) {
+        int index = iFileParentPath.indexOf('-') - 1;
+        if (index >= 0) {
+            return (iFileParentPath.substring(index)).replace("/", "");
+        } else {
+            return "";
+        }
+    }
+
+    /**
+     * Gets the attribute for the given type from the given artifact.
+     *
+     * @param artifact BlackboardArtifact to get the attribute from
+     * @param type     The BlackboardAttribute Type to get
+     *
+     * @return BlackboardAttribute for given artifact and type
+     *
+     * @throws TskCoreException
+     */
+    private BlackboardAttribute getAttributeForArtifact(BlackboardArtifact artifact, BlackboardAttribute.ATTRIBUTE_TYPE type) throws TskCoreException {
+        return artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.fromID(type.getTypeID())));
+    }
+
+    /**
+     * Create TSK_RECYCLE_BIN artifact type.
+     *
+     * @throws TskCoreException
+     */
+    private void createRecycleBinArtifactType() throws TskCoreException {
+        try {
+            tskCase.addBlackboardArtifactType(RECYCLE_BIN_ARTIFACT_NAME, "Recycle Bin"); //NON-NLS
+        } catch (TskDataException ex) {
+            logger.log(Level.INFO, String.format("%s may have already been defined for this case", RECYCLE_BIN_ARTIFACT_NAME));
+        }
+
+    }
+
+    /**
+     * Create the new artifact for the give rFile
+     *
+     * @param rFile    AbstractFile to create the artifact for
+     * @param type     Type of artifact to create
+     * @param fileName The original path of the deleted file
+     * @param userName The name of the user that deleted the file
+     * @param dateTime The time in epoch seconds that the file was deleted
+     *
+     * @return Newly created artifact
+     *
+     * @throws TskCoreException
+     */
+    private BlackboardArtifact createArtifact(AbstractFile rFile, BlackboardArtifact.Type type, String fileName, String userName, long dateTime) throws TskCoreException {
+        BlackboardArtifact bba = rFile.newArtifact(type.getTypeID());
+        bba.addAttribute(new BlackboardAttribute(TSK_PATH, getName(), fileName));
+        bba.addAttribute(new BlackboardAttribute(TSK_DATETIME_DELETED, getName(), dateTime));
+        bba.addAttribute(new BlackboardAttribute(TSK_USER_NAME, getName(), userName == null || userName.isEmpty() ? "" : userName));
+        return bba;
+    }
+
+    /**
+     * Returns a folder for the given path. If the path does not exist the
+     * the folder is created. Recursively makes as many parent folders as needed.
+     *
+     * @param skCase
+     * @param dataSource
+     * @param path
+     *
+     * @return AbstractFile for the given path.
+     *
+     * @throws TskCoreException
+     */
+    private AbstractFile getOrMakeFolder(SleuthkitCase skCase, FsContent dataSource, String path) throws TskCoreException {
+
+        String parentPath = getParentPath(path);
+        String folderName = getFileName(path);
+
+        List<AbstractFile> files = null;
+        if (parentPath != null) {
+            if (!parentPath.equals("/")) {
+                parentPath = parentPath + "/";
+            }
+
+            files = skCase.findAllFilesWhere(String.format("fs_obj_id=%s AND parent_path='%s' AND name='%s'",
+                    dataSource.getFileSystemId(), SleuthkitCase.escapeSingleQuotes(parentPath), folderName != null ? SleuthkitCase.escapeSingleQuotes(folderName) : ""));
+        } else {
+            files = skCase.findAllFilesWhere(String.format("fs_obj_id=%s AND parent_path='/' AND name=''", dataSource.getFileSystemId()));
+        }
+
+        if (files == null || files.isEmpty()) {
+            AbstractFile parent = getOrMakeFolder(skCase, dataSource, parentPath);
+            return skCase.addVirtualDirectory(parent.getId(), folderName);
+        } else {
+            return files.get(0);
+        }
+    }
+
+    /**
+     * Adds a new file system file that is unallocated and maps to the original
+     * file in recycle bin directory.
+     *
+     * @param skCase         The current case.
+     * @param recycleBinFile The file from the recycle bin.
+     * @param parentDir      The directory that the recycled file was deleted.
+     * @param fileName       The name of the file.
+     * @param deletedTime    The time the file was deleted.
+     *
+     * @throws TskCoreException
+     */
+    private void addFileSystemFile(SleuthkitCase skCase, FsContent recycleBinFile, Content parentDir, String fileName, long deletedTime) throws TskCoreException {
+        skCase.addFileSystemFile(
+                recycleBinFile.getDataSourceObjectId(),
+                recycleBinFile.getFileSystemId(),
+                fileName,
+                recycleBinFile.getMetaAddr(),
+                (int) recycleBinFile.getMetaSeq(),
+                recycleBinFile.getAttrType(),
+                recycleBinFile.getAttributeId(),
+                TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC,
+                (short) (TskData.TSK_FS_META_FLAG_ENUM.UNALLOC.getValue() | TskData.TSK_FS_META_FLAG_ENUM.USED.getValue()),
+                recycleBinFile.getSize(),
+                recycleBinFile.getCtime(), recycleBinFile.getCrtime(), recycleBinFile.getAtime(), deletedTime,
+                true, parentDir);
+    }
+
+    /**
+     * Clean up the windows path string to match what the autopsy db uses.
+     *
+     * @param path The file\folder path to normalize
+     *
+     * @return New path string with the root removed (ie X:) and the slashes
+     *         changed from windows to unix.
+     */
+    String normalizeFilePath(String pathString) {
+        if (pathString == null || pathString.isEmpty()) {
+            return null;
+        }
+
+        Path path = Paths.get(pathString);
+        int nameCount = path.getNameCount();
+        if(nameCount > 0) {
+            String rootless = "/" + path.subpath(0, nameCount);
+            return rootless.replace("\\", "/");
+        } else {
+            return "/";
+        }
+    }
+
|
    /**
     * Helper function that gets from the given path either the file name or
     * the last directory in the path.
     *
     * @param filePath The file\directory path
     *
     * @return The file name for a file path, or the name of the last
     *         directory for a directory path.
     */
    String getFileName(String filePath) {
        Path fileNamePath = Paths.get(filePath).getFileName();
        if (fileNamePath != null) {
            return fileNamePath.toString();
        }
        return filePath;
    }
    /**
     * Returns the parent path for the given path.
     *
     * @param path Path string
     *
     * @return The parent path for the given path.
     */
    String getParentPath(String path) {
        Path parentPath = Paths.get(path).getParent();
        if (parentPath != null) {
            return normalizeFilePath(parentPath.toString());
        }
        return null;
    }
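To see the two helpers together, a short hypothetical call with a recycled-file path:

    // For the path "/Users/jdoe/$RABCDEF.txt":
    String name   = getFileName("/Users/jdoe/$RABCDEF.txt");   // "$RABCDEF.txt"
    String parent = getParentPath("/Users/jdoe/$RABCDEF.txt"); // "/Users/jdoe"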
    /**
     * Stores the data parsed from the $I files.
     */
    final class RecycledFileMetaData {

        private final long fileSize;
        private final long deletedTimeStamp;
        private final String fileName;

        /**
         * Constructs a new instance.
         *
         * @param fileSize         Size of the deleted file.
         * @param deletedTimeStamp Time the file was deleted.
         * @param fileName         Name of the deleted file.
         */
        RecycledFileMetaData(Long fileSize, long deletedTimeStamp, String fileName) {
            this.fileSize = fileSize;
            this.deletedTimeStamp = deletedTimeStamp;
            this.fileName = fileName;
        }

        /**
         * Returns the size of the recycled file.
         *
         * @return Size of the deleted file.
         */
        long getFileSize() {
            return fileSize;
        }

        /**
         * Returns the time the file was deleted.
         *
         * @return Deleted time in epoch seconds.
         */
        long getDeletedTimeStamp() {
            return deletedTimeStamp;
        }

        /**
         * Returns the full path to the deleted file or folder. This path will
         * include the drive letter, i.e. C:\
         *
         * @return The full Windows path of the deleted file.
         */
        String getFullWindowsPath() {
            return fileName.trim();
        }
    }
}
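Background, for context: the Recycle Bin stores each recycled file as a renamed $R file plus a small $I metadata record holding the size, deletion FILETIME, and original path. The following is a rough, hypothetical sketch of reading a version 2 (Windows 10) $I record into the three values RecycledFileMetaData holds; it is simplified and is not the module's actual parser:

    import java.io.DataInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.nio.charset.StandardCharsets;

    final class DollarIReader {

        // Version 2 $I layout: 8-byte version, 8-byte file size, 8-byte FILETIME
        // deletion time, 4-byte path length in UTF-16 code units, then the path.
        static void dump(InputStream in) throws IOException {
            DataInputStream data = new DataInputStream(in);
            byte[] header = new byte[28];
            data.readFully(header);
            ByteBuffer buf = ByteBuffer.wrap(header).order(ByteOrder.LITTLE_ENDIAN);
            long version = buf.getLong();   // expected to be 2 on Windows 10
            long fileSize = buf.getLong();
            long filetime = buf.getLong();  // 100ns intervals since Jan 1, 1601
            int pathChars = buf.getInt();
            byte[] pathBytes = new byte[pathChars * 2];
            data.readFully(pathBytes);
            String path = new String(pathBytes, StandardCharsets.UTF_16LE).trim();
            // Same conversion Util.filetimeToMillis() performs, inlined here.
            long deletedEpochSeconds = (filetime / 10_000 - 11_644_473_600_000L) / 1_000;
            System.out.printf("version=%d size=%d deleted=%d path=%s%n",
                    version, fileSize, deletedEpochSeconds, path);
        }
    }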
@ -77,6 +77,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
         Extract dataSourceAnalyzer = new DataSourceUsageAnalyzer();
         Extract safari = new ExtractSafari();
         Extract zoneInfo = new ExtractZoneIdentifier();
+        Extract recycleBin = new ExtractRecycleBin();
 
         extractors.add(chrome);
         extractors.add(firefox);
@ -89,6 +90,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
         extractors.add(osExtract); // this needs to run before the DataSourceUsageAnalyzer
         extractors.add(dataSourceAnalyzer); //this needs to run after ExtractRegistry and ExtractOs
         extractors.add(zoneInfo); // this needs to run after the web browser modules
+        extractors.add(recycleBin); // this needs to run after ExtractRegistry and ExtractOS
 
         browserExtractors.add(chrome);
         browserExtractors.add(firefox);
@ -52,6 +52,12 @@ class Util {
 
     private static Logger logger = Logger.getLogger(Util.class.getName());
 
+    /** Difference between Filetime epoch and Unix epoch (in ms). */
+    private static final long FILETIME_EPOCH_DIFF = 11644473600000L;
+
+    /** One millisecond expressed in units of 100s of nanoseconds. */
+    private static final long FILETIME_ONE_MILLISECOND = 10 * 1000;
+
     private Util() {
     }
 
@ -176,4 +182,16 @@ class Util {
         }
         return results;
     }
+
+    /**
+     * Converts a Windows FILETIME to Java/Unix epoch milliseconds.
+     *
+     * @param filetime 100-nanosecond intervals since January 1, 1601
+     *
+     * @return Java/Unix epoch milliseconds
+     */
+    static long filetimeToMillis(final long filetime) {
+        return (filetime / FILETIME_ONE_MILLISECOND) - FILETIME_EPOCH_DIFF;
+    }
+
 }
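A quick numeric check of this conversion, with the FILETIME value worked out by hand (an illustrative assertion, not a test from the codebase; run with -ea to enable asserts):

    // 2019-01-01T00:00:00Z is 1546300800000 ms after the Unix epoch, so its
    // FILETIME is (1546300800000 + 11644473600000) * 10000 = 131907744000000000.
    long filetime = 131907744000000000L;
    assert Util.filetimeToMillis(filetime) == 1546300800000L;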
@ -9,10 +9,19 @@ The following need to be done at least once. They do not need to be repeated for
 -- Linux: % sudo apt-get install testdisk
 -- OS X: % brew install testdisk
 
-- Install Oracle Java and set JAVA_HOME.
+- Install a Java 8 JRE and JavaFX 8 and set JAVA_HOME.
--- Linux: Use the instructions here: https://medium.com/coderscorner/installing-oracle-java-8-in-ubuntu-16-10-845507b13343
+-- Linux: Any Java 8 version of an OpenJDK/OpenJFX distribution should suffice. The following instructions use the Zulu Community distribution.
+   1. Download a 64-bit Java 8 JRE for your specific platform from https://www.azul.com/downloads/zulu-community
+   2. Install the JRE, e.g. 'sudo apt install ./zulu8.40.0.25-ca-jre8.0.222-linux_amd64.deb'
+   3. Download a 64-bit Java 8 JavaFX for your specific platform from the same location.
+   4. Extract the contents of the JavaFX archive into the folder where the JRE was installed,
+      e.g. 'cd /usr/lib/jvm/zre-8-amd64; sudo tar xzf ~/Downloads/zulu8.40.0.25-ca-fx-jre8.0.222-linux_x64.tar.gz --strip-components=1'
+   5. Confirm Java 8 is being found by running 'java -version'
+   6. Set the JAVA_HOME environment variable to the location of the JRE installation (e.g. /usr/lib/jvm/zre-8-amd64)
+
 NOTE: You may need to log out and back in again after setting JAVA_HOME before the Autopsy
 unix_setup.sh script can see the value.
 
 -- OS X: Use the Oracle website: https://www.java.com/
    Set JAVA_HOME with something like: export JAVA_HOME=`/usr/libexec/java_home` in .bash_profile
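One way to confirm that the JRE and the JavaFX files ended up in the same installation is a small throwaway program (hypothetical, not part of the repository):

    // Save as CheckFx.java, then run: javac CheckFx.java && java CheckFx
    public class CheckFx {
        public static void main(String[] args) throws Exception {
            System.out.println("java.version = " + System.getProperty("java.version"));
            System.out.println("java.home    = " + System.getProperty("java.home"));
            // Throws ClassNotFoundException if the JavaFX jars were not
            // extracted into the JRE folder in step 4.
            Class.forName("javafx.application.Platform");
            System.out.println("JavaFX found");
        }
    }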
@ -38,7 +38,7 @@ PROJECT_NAME = "Autopsy User Documentation"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER = 4.12.0
+PROJECT_NUMBER = 4.13.0
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
@ -1025,7 +1025,7 @@ GENERATE_HTML = YES
 # The default directory is: html.
 # This tag requires that the tag GENERATE_HTML is set to YES.
 
-HTML_OUTPUT = 4.12.0
+HTML_OUTPUT = 4.13.0
 
 # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
 # generated HTML page (for example: .htm, .php, .asp).
@ -38,7 +38,7 @@ PROJECT_NAME = "Autopsy"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER = 4.12.0
+PROJECT_NUMBER = 4.13.0
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
@ -1063,7 +1063,7 @@ GENERATE_HTML = YES
 # The default directory is: html.
 # This tag requires that the tag GENERATE_HTML is set to YES.
 
-HTML_OUTPUT = api-docs/4.12.0/
+HTML_OUTPUT = api-docs/4.13.0/
 
 # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
 # generated HTML page (for example: .htm, .php, .asp).
@ -4,7 +4,7 @@ app.title=Autopsy
 ### lowercase version of above
 app.name=${branding.token}
 ### if left unset, version will default to today's date
-app.version=4.12.0
+app.version=4.13.0
 ### build.type must be one of: DEVELOPMENT, RELEASE
 #build.type=RELEASE
 build.type=DEVELOPMENT
@ -430,8 +430,8 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
     report_index = line.find('INSERT INTO "reports"') > -1 or line.find('INSERT INTO reports ') > -1
     layout_index = line.find('INSERT INTO "tsk_file_layout"') > -1 or line.find('INSERT INTO tsk_file_layout ') > -1
     data_source_info_index = line.find('INSERT INTO "data_source_info"') > -1 or line.find('INSERT INTO data_source_info ') > -1
-    event_description_index = line.find('INSERT INTO "tsk_event_descriptions"') > -1 or line.find('INSERT INTO data_source_info ') > -1
+    event_description_index = line.find('INSERT INTO "tsk_event_descriptions"') > -1 or line.find('INSERT INTO tsk_event_descriptions ') > -1
-    events_index = line.find('INSERT INTO "tsk_events"') > -1 or line.find('INSERT INTO data_source_info ') > -1
+    events_index = line.find('INSERT INTO "tsk_events"') > -1 or line.find('INSERT INTO tsk_events ') > -1
     ingest_job_index = line.find('INSERT INTO "ingest_jobs"') > -1 or line.find('INSERT INTO ingest_jobs ') > -1
     examiners_index = line.find('INSERT INTO "tsk_examiners"') > -1 or line.find('INSERT INTO tsk_examiners ') > -1
     ig_groups_index = line.find('INSERT INTO "image_gallery_groups"') > -1 or line.find('INSERT INTO image_gallery_groups ') > -1