- fix columns to show for props of child nodes

- generalize getting AbstractFsContentNode property map
- fix typo in getString in binary-to-string conversion
- add missing license comments
This commit is contained in:
adam-m 2011-12-30 15:54:24 -05:00
parent 50106d5a90
commit 8e2b2fcaa2
6 changed files with 316 additions and 55 deletions

View File

@@ -25,7 +25,9 @@ import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import org.openide.explorer.ExplorerManager;
@@ -135,6 +137,41 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
}
}
/**
* Gets regular Bean property-set properties from the first child and, recursively, from each first child below it.
* Note: won't work out of the box for lazy load - you need to set all child props on the parent by hand.
* @param parent Node with at least one child to get properties from
* @return the merged Property headers of the discovered children
*/
private Node.Property[] getAllChildPropertyHeaders(Node parent) {
Node firstChild = parent.getChildren().getNodeAt(0);
Property[] properties = null;
if (firstChild == null) {
throw new IllegalArgumentException("Couldn't get a child Node from the given parent.");
} else {
Set<Property> allProperties = new LinkedHashSet<Property>();
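// walk down the chain of first children, collecting the properties of every level;
// the LinkedHashSet preserves insertion order and drops duplicate columns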
while (firstChild != null) {
for (PropertySet ps : firstChild.getPropertySets()) {
//if (ps.getName().equals(Sheet.PROPERTIES)) {
//return ps.getProperties();
final Property[] props = ps.getProperties();
final int propsNum = props.length;
for (int i = 0; i < propsNum; ++i)
allProperties.add(props[i]);
//}
}
firstChild = firstChild.getChildren().getNodeAt(0);
}
properties = allProperties.toArray(new Property[0]);
//throw new IllegalArgumentException("Child Node doesn't have the regular PropertySet.");
}
return properties;
}
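As the Javadoc above notes, this does not cover lazily loaded children on its own: a column only shows up if the parent's row already carries the property key. The keyword-search change later in this commit (initCommonProperties) applies exactly that workaround; a minimal sketch of the idea, assuming a map-backed row and java.util imports (the helper name and column list are hypothetical):

// Sketch only: seed the parent's row map with every expected column key,
// so the table creates the columns before any child Node is materialized.
private static Map<String, Object> seedParentRow(List<String> columnNames) {
    Map<String, Object> row = new LinkedHashMap<String, Object>();
    for (String col : columnNames) {
        row.put(col, ""); // empty placeholder; lazily created children supply the real values
    }
    return row;
}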
@Override
public void setNode(Node selectedNode) {
// change the cursor to "waiting cursor" for this operation
@@ -163,7 +200,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
OutlineView ov = ((OutlineView) this.tableScrollPanel);
List<Node.Property> tempProps = new ArrayList<Node.Property>(Arrays.asList(getChildPropertyHeaders(selectedNode)));
List<Node.Property> tempProps = new ArrayList<Node.Property>(Arrays.asList(getAllChildPropertyHeaders(selectedNode)));
tempProps.remove(0);
@@ -245,12 +282,11 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
this.setCursor(null);
}
}
private static Object[][] getRowValues(Node node, int rows) {
// how many rows are we returning
int maxRows = Math.min(rows, node.getChildren().getNodesCount());
Object[][] objs = new Object[maxRows][];
for (int i = 0; i < maxRows; i++) {
@@ -266,7 +302,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
objs[i][j] = "n/a";
} catch (InvocationTargetException ignore) {
objs[i][j] = "n/a";
}
}
}
}
return objs;

View File

@@ -18,6 +18,8 @@
*/
package org.sleuthkit.autopsy.datamodel;
import java.util.LinkedHashMap;
import java.util.Map;
import org.openide.nodes.Sheet;
import org.sleuthkit.datamodel.FsContent;
@@ -25,16 +27,139 @@ import org.sleuthkit.datamodel.FsContent;
* Abstract class that implements the commonality between File and Directory
* Nodes (same properties).
*/
abstract class AbstractFsContentNode<T extends FsContent> extends AbstractContentNode<T> {
public abstract class AbstractFsContentNode<T extends FsContent> extends AbstractContentNode<T> {
/**
* Name of the property that holds the name.
*/
public static final String PROPERTY_NAME = "Name";
/**
* Name of the property that holds the path.
*/
public static final String PROPERTY_LOCATION = "Location";
// Note: this order matters for the search result; change it if the order of property headers on the "KeywordSearchNode" changes
public static enum FsContentPropertyType {
NAME {
@Override
public String toString() {
return "Name";
}
},
LOCATION {
@Override
public String toString() {
return "Location";
}
},
MOD_TIME {
@Override
public String toString() {
return "Mod. Time";
}
},
CHANGED_TIME {
@Override
public String toString() {
return "Change Time";
}
},
ACCESS_TIME {
@Override
public String toString() {
return "Access Time";
}
},
CREATED_TIME {
@Override
public String toString() {
return "Created Time";
}
},
SIZE {
@Override
public String toString() {
return "Size";
}
},
FLAGS_DIR {
@Override
public String toString() {
return "Flags(Dir)";
}
},
FLAGS_META {
@Override
public String toString() {
return "Flags(Meta)";
}
},
MODE {
@Override
public String toString() {
return "Mode";
}
},
USER_ID {
@Override
public String toString() {
return "UserID";
}
},
GROUP_ID {
@Override
public String toString() {
return "GroupID";
}
},
META_ADDR {
@Override
public String toString() {
return "Meta Addr.";
}
},
ATTR_ADDR {
@Override
public String toString() {
return "Attr. Addr.";
}
},
TYPE_DIR {
@Override
public String toString() {
return "Type(Dir)";
}
},
TYPE_META {
@Override
public String toString() {
return "Type(Meta)";
}
},
KNOWN {
@Override
public String toString() {
return "Known";
}
},
}
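The per-constant toString() overrides work, but the same mapping can be written more compactly with a display-name field; a sketch for comparison only, not what this commit uses (the enum name is hypothetical and only a few constants are shown):

public enum FsContentDisplayName {
    NAME("Name"),
    LOCATION("Location"),
    MOD_TIME("Mod. Time"),
    SIZE("Size"); // remaining constants omitted

    private final String displayName;

    FsContentDisplayName(String displayName) {
        this.displayName = displayName;
    }

    @Override
    public String toString() {
        return displayName;
    }
}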
AbstractFsContentNode(T fsContent) {
super(fsContent);
@@ -49,25 +174,43 @@ abstract class AbstractFsContentNode<T extends FsContent> extends AbstractConten
s.put(ss);
}
// Note: this order matters for the search result, changed it if the order of property headers on the "KeywordSearchNode"changed
ss.put(new NodeProperty(PROPERTY_NAME, "Name", "no description", content.getName()));
ss.put(new NodeProperty(PROPERTY_LOCATION, "Location", "no description", DataConversion.getformattedPath(ContentUtils.getDisplayPath(content), 0)));
ss.put(new NodeProperty("Modified Time", "Modified Time", "no description", content.getMtimeAsDate()));
ss.put(new NodeProperty("Changed Time", "Changed Time", "no description", content.getCtimeAsDate()));
ss.put(new NodeProperty("Access Time", "Access Time", "no description", content.getAtimeAsDate()));
ss.put(new NodeProperty("Created Time", "Created Time", "no description", content.getCrtimeAsDate()));
ss.put(new NodeProperty("Size", "Size", "no description", content.getSize()));
ss.put(new NodeProperty("Flags (Directory)", "Flags (Directory)", "no description", content.getDirFlagsAsString()));
ss.put(new NodeProperty("Flags (Meta)", "Flags (Meta)", "no description", content.getMetaFlagsAsString()));
ss.put(new NodeProperty("Mode ", "Mode", "no description", content.getModeAsString()));
ss.put(new NodeProperty("User ID", "User ID", "no description", content.getUid()));
ss.put(new NodeProperty("Group ID", "Group ID", "no description", content.getGid()));
ss.put(new NodeProperty("Metadata Address", "Metadata Addr", "no description", content.getMeta_addr()));
ss.put(new NodeProperty("Attribute Address", "Attribute Addr", "no description", Long.toString(content.getAttr_type()) + "-" + Long.toString(content.getAttr_id())));
ss.put(new NodeProperty("Type (Directory)", "Type (Directory)", "no description", content.getDirTypeAsString()));
ss.put(new NodeProperty("Type (Meta)", "Type (Meta)", "no description", content.getMetaTypeAsString()));
ss.put(new NodeProperty("Known", "Known", "no description", content.getKnown().getName()));
Map<String, Object> map = new LinkedHashMap<String, Object>();
fillPropertyMap(map, content);
FsContentPropertyType[] fsTypes = FsContentPropertyType.values();
final int FS_PROPS_LEN = fsTypes.length;
final String NO_DESCR = "no description";
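// one NodeProperty per FsContentPropertyType, in enum declaration order;
// the values come from the map filled by fillPropertyMap() above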
for (int i = 0; i < FS_PROPS_LEN; ++i) {
final FsContentPropertyType propType = fsTypes[i];
final String propString = propType.toString();
ss.put(new NodeProperty(propString, propString, NO_DESCR, map.get(propString)));
}
return s;
}
/**
* Fill map with FsContent properties
* @param map map, with preserved ordering, into which property names/values are put
* @param content FsContent to extract properties from
*/
public static void fillPropertyMap(Map<String, Object> map, FsContent content) {
map.put(FsContentPropertyType.NAME.toString(), content.getName());
map.put(FsContentPropertyType.LOCATION.toString(), DataConversion.getformattedPath(ContentUtils.getDisplayPath(content), 0));
map.put(FsContentPropertyType.MOD_TIME.toString(), content.getMtimeAsDate());
map.put(FsContentPropertyType.CHANGED_TIME.toString(), content.getCtimeAsDate());
map.put(FsContentPropertyType.ACCESS_TIME.toString(), content.getAtimeAsDate());
map.put(FsContentPropertyType.CREATED_TIME.toString(), content.getCrtimeAsDate());
map.put(FsContentPropertyType.SIZE.toString(), Long.toString(content.getSize()));
map.put(FsContentPropertyType.FLAGS_DIR.toString(), content.getDirFlagsAsString());
map.put(FsContentPropertyType.FLAGS_META.toString(), content.getMetaFlagsAsString());
map.put(FsContentPropertyType.MODE.toString(), content.getModeAsString());
map.put(FsContentPropertyType.USER_ID.toString(), Long.toString(content.getUid()));
map.put(FsContentPropertyType.GROUP_ID.toString(), Long.toString(content.getGid()));
map.put(FsContentPropertyType.META_ADDR.toString(), Long.toString(content.getMeta_addr()));
map.put(FsContentPropertyType.ATTR_ADDR.toString(), Long.toString(content.getAttr_type()) + "-" + Long.toString(content.getAttr_id()));
map.put(FsContentPropertyType.TYPE_DIR.toString(), content.getDirTypeAsString());
map.put(FsContentPropertyType.TYPE_META.toString(), content.getMetaTypeAsString());
map.put(FsContentPropertyType.KNOWN.toString(), content.getKnown().getName());
}
}
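Any module that needs the same columns can now reuse the helper instead of duplicating the put calls; a minimal usage sketch (fsContent stands in for some FsContent result and is an assumption for illustration):

Map<String, Object> resMap = new LinkedHashMap<String, Object>();
AbstractFsContentNode.fillPropertyMap(resMap, fsContent);
// resMap now maps each FsContentPropertyType display name to the file's value,
// in the same order as the declared table columns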

View File

@@ -147,13 +147,13 @@ public class DataConversion {
// the printable ASCII chars are dec 32-126
// and we want to include TAB as well (dec 9)
if (!((dec < 32 || dec > 126) && dec != 9)) {
temp.append(NLS);
temp.append(tempChar);
++counter;
} else {
if (counter >= parameter) {
// add to the result and also add the new line at the end
result.append(temp);
result.append(Character.toString(NL));
result.append(NLS);
// reset the temp and counter
temp = new StringBuilder();
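For reference, the condition in the comment above boils down to the following predicate (a standalone restatement; isPrintableAscii is a hypothetical helper, not part of DataConversion):

// printable ASCII is dec 32-126, plus TAB (dec 9)
static boolean isPrintableAscii(char c) {
    final int dec = (int) c;
    return (dec >= 32 && dec <= 126) || dec == 9;
}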

View File

@@ -1,4 +1,21 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.util.Map;

View File

@@ -1,3 +1,22 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.util.Map;

View File

@@ -46,9 +46,12 @@ import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.windows.TopComponent;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.datamodel.AbstractFsContentNode;
import org.sleuthkit.autopsy.datamodel.AbstractFsContentNode.FsContentPropertyType;
import org.sleuthkit.autopsy.datamodel.KeyValueNode;
import org.sleuthkit.autopsy.datamodel.KeyValueThing;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.FsContent;
public class RegexQuery implements KeywordSearchQuery {
@@ -60,6 +63,30 @@ public class RegexQuery implements KeywordSearchQuery {
private static final int TERMS_TIMEOUT = 90 * 1000; //in ms
private String regexQuery;
private static Logger logger = Logger.getLogger(RegexQuery.class.getName());
//common properties (superset of all Node properties) to be displayed as columns
//these are merged with FsContentPropertyType defined properties
private static enum CommonPropertyTypes {
QUERY {
@Override
public String toString() {
return "Query";
}
},
MATCH {
@Override
public String toString() {
return "Match";
}
},
/* MATCH_RANK {
@Override
public String toString() {
return "Match Rank";
}
},*/
}
public RegexQuery(String query) {
this.regexQuery = query;
@@ -109,14 +136,16 @@ public class RegexQuery implements KeywordSearchQuery {
Iterator<Term> it = terms.iterator();
int termID = 0;
long totalMatches = 0;
//long totalMatches = 0;
while (it.hasNext()) {
Term term = it.next();
Map<String, Object> kvs = new LinkedHashMap<String, Object>();
long matches = term.getFrequency();
kvs.put("#exact matches", matches);
things.add(new KeyValueThing(term.getTerm(), kvs, ++termID));
totalMatches += matches;
final String match = term.getTerm();
setCommonProperty(kvs, CommonPropertyTypes.MATCH, match);
//setCommonProperty(kvs, CommonPropertyTypes.MATCH_RANK, Long.toString(matches));
things.add(new KeyValueThing(match, kvs, ++termID));
//totalMatches += matches;
}
Node rootNode = null;
@@ -138,6 +167,32 @@ public class RegexQuery implements KeywordSearchQuery {
}
/**
* Call this at least for the parent Node, to make sure all common
* properties are displayed as columns (since child Nodes are loaded lazily,
* the properties need to be preinitialized when sending the parent Node).
* @param toSet property set map for a Node
*/
private static void initCommonProperties(Map<String, Object> toSet) {
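// seed every column, common and FsContent alike, with an empty value so the
// columns exist before any lazily loaded child Node provides real data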
CommonPropertyTypes [] commonTypes = CommonPropertyTypes.values();
final int COMMON_PROPS_LEN = commonTypes.length;
for (int i = 0; i < COMMON_PROPS_LEN; ++i) {
toSet.put(commonTypes[i].toString(), "");
}
FsContentPropertyType [] fsTypes = FsContentPropertyType.values();
final int FS_PROPS_LEN = fsTypes.length;
for (int i = 0; i < FS_PROPS_LEN; ++i) {
toSet.put(fsTypes[i].toString(), "");
}
}
private static void setCommonProperty(Map<String, Object> toSet, CommonPropertyTypes type, String value) {
final String typeStr = type.toString();
toSet.put(typeStr, value);
}
/**
* factory produces top level result nodes showing query used
*/
@@ -162,8 +217,9 @@ public class RegexQuery implements KeywordSearchQuery {
protected boolean createKeys(List<KeyValueThing> toPopulate) {
int id = 0;
for (String query : queries) {
LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();
map.put("Query", query);
Map<String, Object> map = new LinkedHashMap<String, Object>();
initCommonProperties(map);
setCommonProperty(map, CommonPropertyTypes.QUERY, query);
toPopulate.add(new KeyValueThing(query, map, ++id));
}
@@ -231,25 +287,15 @@ public class RegexQuery implements KeywordSearchQuery {
uniqueMatches.addAll(matches);
int resID = 0;
for (FsContent f : uniqueMatches) {
for (FsContent f : uniqueMatches) {
Map<String, Object> resMap = new LinkedHashMap<String, Object>();
//final String name = f.getName();
final long id = f.getId();
//build dir name
String dirName = KeywordSearchUtil.buildDirName(f);
resMap.put("dir", dirName);
resMap.put("id", Long.toString(id));
final String name = dirName + f.getName();
resMap.put("name", name);
toPopulate.add(new KeyValueThingContent(name, resMap, ++resID, f, keywordQuery));
AbstractFsContentNode.fillPropertyMap(resMap, (File)f);
toPopulate.add(new KeyValueThingContent(f.getName(), resMap, ++resID, f, keywordQuery));
}
//TODO fix showing of 2nd level child attributes in the GUI (DataResultViewerTable issue?)
return true;
}
@Override
protected Node createNodeForKey(KeyValueThing thing) {