Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-10 07:09:32 +00:00)

Commit c93a57a9d9: Merge branch 'master' of https://github.com/sleuthkit/autopsy
@@ -23,6 +23,7 @@ import java.awt.Component;
 import java.awt.Dimension;
 import java.awt.EventQueue;
 import java.awt.image.BufferedImage;
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.IntBuffer;
@@ -34,10 +35,12 @@ import java.util.concurrent.TimeUnit;
 import java.util.logging.Level;
 import javafx.application.Platform;
 import javafx.embed.swing.JFXPanel;
+import javafx.embed.swing.SwingFXUtils;
 import javafx.scene.Group;
 import javafx.scene.Scene;
 import javafx.scene.image.Image;
 import javafx.scene.image.ImageView;
+import javax.imageio.ImageIO;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import javax.swing.BoxLayout;
 import javax.swing.SwingUtilities;
@@ -52,10 +55,12 @@ import org.netbeans.api.progress.ProgressHandle;
 import org.netbeans.api.progress.ProgressHandleFactory;
 import org.openide.nodes.Node;
 import org.openide.util.Cancellable;
+import org.openide.util.Exceptions;
 import org.openide.util.lookup.ServiceProvider;
 import org.openide.util.lookup.ServiceProviders;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
+import org.sleuthkit.autopsy.corelibs.ScalrWrapper;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.datamodel.AbstractFile;
@@ -71,7 +76,7 @@ import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
 })
 public class DataContentViewerMedia extends javax.swing.JPanel implements DataContentViewer, FrameCapture {

-private static final String[] IMAGES = new String[]{".jpg", ".jpeg", ".png", ".gif", ".jpe", ".bmp"};
+private String[] IMAGES; // use javafx supported
 private static final String[] VIDEOS = new String[]{".mov", ".m4v", ".flv", ".mp4", ".3gp", ".avi", ".mpg", ".mpeg"};
 private static final String[] AUDIOS = new String[]{".mp3", ".wav", ".wma"};
 private static final int NUM_FRAMES = 12;
@@ -88,6 +93,7 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 private BufferedImage currentImage = null;
 private boolean gstInited = false;
 private AbstractFile lastFile;
+private boolean inImageMode; //keeps track if already in image mode to minimize UI setup

 /**
 * Creates new form DataContentViewerVideo
@@ -98,6 +104,7 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 }

 private void customizeComponents() {
+inImageMode = false;

 Platform.setImplicitExit(false);
 PlatformImpl.startup(new Runnable() {
@@ -106,6 +113,17 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 logger.log(Level.INFO, "Initializing JavaFX for image viewing");
 }
 });
+logger.log(Level.INFO, "Supported image formats by javafx image viewer: ");
+
+//initialize supported image types
+//TODO use mime-types instead once we have support
+String[] fxSupportedImagesSuffixes = ImageIO.getReaderFileSuffixes();
+IMAGES = new String[fxSupportedImagesSuffixes.length];
+for (int i = 0; i < fxSupportedImagesSuffixes.length; ++i) {
+String suffix = fxSupportedImagesSuffixes[i];
+logger.log(Level.INFO, "suffix: " + suffix);
+IMAGES[i] = "." + suffix;
+}
+
 try {
 logger.log(Level.INFO, "Initializing gstreamer for video/audio viewing");
@@ -245,7 +263,7 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 if (selectedNode == null) {
 return;
 }

 AbstractFile file = selectedNode.getLookup().lookup(AbstractFile.class);
 if (file == null) {
 return;
@@ -259,7 +277,7 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo

 reset();
 setComponentsVisibility(false);

 // get rid of any existing videoProgressWorker thread
 if (videoProgressWorker != null) {
 videoProgressWorker.cancel(true);
@@ -272,6 +290,7 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 showImageFx(file);
 } else if (gstInited
 && (containsExt(file.getName(), VIDEOS) || containsExt(file.getName(), AUDIOS))) {
+inImageMode = false;
 setupVideo(file);
 }
 }
@@ -281,18 +300,45 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
 *
 * @param file
 */
-private void showImageFx(AbstractFile file) {
-final InputStream inputStream = new ReadContentInputStream(file);
+private void showImageFx(final AbstractFile file) {
+final String fileName = file.getName();

 // load the image
 PlatformImpl.runLater(new Runnable() {
 @Override
 public void run() {
-Image fxImage = new Image(inputStream);

 Dimension dims = DataContentViewerMedia.this.getSize();

+final InputStream inputStream = new ReadContentInputStream(file);
+
+final Image fxImage;
+try {
+//original input stream
+BufferedImage bi = ImageIO.read(inputStream);
+//scale image using Scalr
+BufferedImage biScaled = ScalrWrapper.resizeHighQuality(bi, (int) dims.getWidth(), (int) dims.getHeight());
+//convert from awt imageto fx image
+fxImage = SwingFXUtils.toFXImage(biScaled, null);
+} catch (IOException ex) {
+logger.log(Level.WARNING, "Could not load image file into media view: " + fileName, ex);
+return;
+} catch (OutOfMemoryError ex) {
+logger.log(Level.WARNING, "Could not load image file into media view (too large): " + fileName, ex);
+MessageNotifyUtil.Notify.warn("Could not load image file (too large): " + file.getName(), ex.getMessage());
+return;
+} finally {
+try {
+inputStream.close();
+} catch (IOException ex) {
+logger.log(Level.WARNING, "Could not close input stream after loading image in media view: " + fileName, ex);
+}
+}
+
+if (fxImage == null) {
+logger.log(Level.WARNING, "Could not load image file into media view: " + fileName);
+return;
+}
+
 // simple displays ImageView the image as is
 ImageView fxImageView = new ImageView();
 fxImageView.setImage(fxImage);
@@ -307,22 +353,38 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo

 Group fxRoot = new Group();

-Scene fxScene = new Scene(fxRoot, dims.getWidth(), dims.getHeight(), javafx.scene.paint.Color.BLACK);
+//Scene fxScene = new Scene(fxRoot, dims.getWidth(), dims.getHeight(), javafx.scene.paint.Color.BLACK);
+Scene fxScene = new Scene(fxRoot, javafx.scene.paint.Color.BLACK);
 fxRoot.getChildren().add(fxImageView);

-final JFXPanel fxPanel = new JFXPanel();
-fxPanel.setScene(fxScene);
+if (inImageMode) {
+final JFXPanel fxPanel = (JFXPanel) videoPanel.getComponent(0);
+fxPanel.setScene(fxScene);
+videoPanel.setVisible(true);
+} else {
+final JFXPanel fxPanel = new JFXPanel();
+fxPanel.setScene(fxScene);
+
 //when done, join with the swing panel
 EventQueue.invokeLater(new Runnable() {
 @Override
 public void run() {
-videoPanel.removeAll();
-videoPanel.setLayout(new BoxLayout(videoPanel, BoxLayout.Y_AXIS));
-videoPanel.add(fxPanel);
-videoPanel.setVisible(true);
-}
-});
+inImageMode = true;
+//remove video panels and recreate image view panel
+//TODO use swing layered pane to switch between different modes
+videoPanel.removeAll();
+videoPanel.setLayout(new BoxLayout(videoPanel, BoxLayout.Y_AXIS));
+videoPanel.add(fxPanel);
+videoPanel.setVisible(true);
+
+if (fxImage.isError()) {
+logger.log(Level.WARNING, "Could not load image file into media view: " + fileName);
+//MessageNotifyUtil.Message.warn("Could not load image file: " + file.getName());
+}
+}
+});
+}
 }
 });

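The hunks above replace the direct JavaFX Image(InputStream) constructor with an ImageIO read, an imgscalr resize, and a SwingFXUtils conversion before the result is shown in an ImageView hosted by a JFXPanel. A minimal standalone sketch of that Swing/JavaFX handoff, using only the public JavaFX and ImageIO APIs imported above (the class name, file path, and window size are illustrative, and the Autopsy-internal ScalrWrapper step is omitted):

import java.awt.image.BufferedImage;
import java.io.File;
import javafx.application.Platform;
import javafx.embed.swing.JFXPanel;
import javafx.embed.swing.SwingFXUtils;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.image.ImageView;
import javax.imageio.ImageIO;
import javax.swing.JFrame;
import javax.swing.SwingUtilities;

public class FxImagePanelSketch {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            // Build the Swing side on the event dispatch thread; JFXPanel also boots the JavaFX runtime.
            JFrame frame = new JFrame("image view sketch");
            JFXPanel fxPanel = new JFXPanel();
            frame.add(fxPanel);
            frame.setSize(800, 600);
            frame.setVisible(true);

            Platform.runLater(() -> {
                try {
                    // Read with ImageIO (same entry point the viewer now uses),
                    // then hand the AWT image to JavaFX via SwingFXUtils.
                    BufferedImage bi = ImageIO.read(new File(args[0]));
                    ImageView view = new ImageView(SwingFXUtils.toFXImage(bi, null));
                    Group root = new Group(view);
                    fxPanel.setScene(new Scene(root, javafx.scene.paint.Color.BLACK));
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            });
        });
    }
}

The ordering mirrors the diff: the JFXPanel lives on the Swing event thread, while the Scene and ImageView are built on the JavaFX application thread, matching the EventQueue.invokeLater/PlatformImpl.runLater split above.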
@@ -22,6 +22,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.logging.Level;
+import javax.imageio.ImageIO;
 import org.openide.nodes.AbstractNode;
 import org.openide.nodes.Children;
 import org.openide.nodes.Node;
@@ -154,7 +156,17 @@ class ThumbnailViewChildren extends Children.Keys<Integer> {

 private static class IsSupportedContentVisitor extends ContentVisitor.Default<Boolean> {

-private static final List<String> SUPP_EXTENSIONS = Arrays.asList(".jpeg", ".jpg", ".gif", ".png");
+private final List<String> SUPP_EXTENSIONS;

+IsSupportedContentVisitor() {
+String[] supportedImagesSuffixes = ImageIO.getReaderFileSuffixes();
+
+SUPP_EXTENSIONS = new ArrayList<String>(supportedImagesSuffixes.length);
+for (int i = 0; i < supportedImagesSuffixes.length; ++i) {
+String suffix = supportedImagesSuffixes[i];
+SUPP_EXTENSIONS.add("." + suffix);
+}
+}
+
 @Override
 public Boolean visit(DerivedFile f) {

@@ -26,6 +26,7 @@ import java.awt.Toolkit;
 import java.awt.image.BufferedImage;
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.lang.ref.SoftReference;
 import java.util.logging.Level;
 import javax.imageio.ImageIO;
@@ -34,33 +35,41 @@ import javax.swing.JFrame;
 import org.openide.nodes.Children;
 import org.openide.nodes.FilterNode;
 import org.openide.nodes.Node;
+import org.openide.util.Exceptions;
 import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.corelibs.ScalrWrapper;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.ReadContentInputStream;
 import org.sleuthkit.datamodel.TskException;

 /**
-* Node that wraps around original node and adds the bitmap icon representing the picture
+* Node that wraps around original node and adds the bitmap icon representing
+* the picture
 */
 class ThumbnailViewNode extends FilterNode {

 private SoftReference<Image> iconCache;

 private static final Image defaultIcon = new ImageIcon("/org/sleuthkit/autopsy/images/file-icon.png").getImage();
+private static final Logger logger = Logger.getLogger(ThumbnailViewNode.class.getName());
+//private final BufferedImage defaultIconBI;

-/** the constructor */
+/**
+* the constructor
+*/
 ThumbnailViewNode(Node arg) {
 super(arg, Children.LEAF);
 }

 @Override
-public String getDisplayName(){
-if(super.getDisplayName().length() > 15)
+public String getDisplayName() {
+if (super.getDisplayName().length() > 15) {
 return super.getDisplayName().substring(0, 15).concat("...");
-else
+} else {
 return super.getDisplayName();
+}
 }

 @Override
 public Image getIcon(int type) {
 Image icon = null;
@@ -68,12 +77,11 @@ class ThumbnailViewNode extends FilterNode {
 if (iconCache != null) {
 icon = iconCache.get();
 }

-
 if (icon == null) {
 Content content = this.getLookup().lookup(Content.class);

 if (content != null) {
 if (getFile(content.getId()).exists()) {
 try {
@@ -84,85 +92,56 @@ class ThumbnailViewNode extends FilterNode {
 } else {
 try {
 icon = generateIcon(content);
-ImageIO.write(toBufferedImage(icon), "jpg", getFile(content.getId()));
-} catch (TskException ex) {
-icon = ThumbnailViewNode.defaultIcon;
+if (icon == null) {
+icon = ThumbnailViewNode.defaultIcon;
+} else {
+ImageIO.write((BufferedImage) icon, "jpg", getFile(content.getId()));
+}
 } catch (IOException ex) {
+logger.log(Level.WARNING, "Could not write cache thumbnail: " + content, ex);
 }
 }
 } else {
 icon = ThumbnailViewNode.defaultIcon;
 }

 iconCache = new SoftReference<Image>(icon);
 }

 return icon;
 }

-static private Image generateIcon(Content content) throws TskException {
-byte[] data = new byte[(int)content.getSize()];
-int bytesRead = content.read(data, 0, content.getSize());
-if (bytesRead < 1)
+/*
+* Generate a scaled image
+*/
+static private BufferedImage generateIcon(Content content) {
+InputStream inputStream = null;
+try {
+inputStream = new ReadContentInputStream(content);
+BufferedImage bi = ImageIO.read(inputStream);
+BufferedImage biScaled = ScalrWrapper.resizeFast(bi, 100, 100);
+return biScaled;
+}catch (OutOfMemoryError e) {
+logger.log(Level.WARNING, "Could not scale image (too large): " + content.getName(), e);
 return null;
+}
-Image result = Toolkit.getDefaultToolkit().createImage(data);
+catch (Exception e) {
+logger.log(Level.WARNING, "Could not scale image: " + content.getName(), e);
+return null;
+} finally {
+if (inputStream != null) {
+try {
+inputStream.close();
+} catch (IOException ex) {
+logger.log(Level.WARNING, "Could not close input stream after resizing thumbnail: " + content.getName(), ex);
+}
+}
-
-// scale the image
-MediaTracker mTracker = new MediaTracker(new JFrame());
-mTracker.addImage(result, 1);
-try {
-mTracker.waitForID(1);
-} catch (InterruptedException ex) {
-// TODO: maybe make bubble instead
-Logger.getLogger(ThumbnailViewNode.class.getName()).log(Level.WARNING, "Error while trying to scale the icon.", ex);
 }
-int width = result.getWidth(null);
-int height = result.getHeight(null);
-
-int max = Math.max(width, height);
-double scale = (75 * 100) / max;
-
-// getScaledInstance can't take have width or height be 0, so round
-// up by adding 1 after truncating to int.
-width = (int) ((width * scale) / 100) + 1;
-height = (int) ((height * scale) / 100) + 1;
-
-result = result.getScaledInstance(width, height, Image.SCALE_SMOOTH);
-
-// load the image completely
-mTracker.addImage(result, 1);
-try {
-mTracker.waitForID(1);
-} catch (InterruptedException ex) {
-// TODO: maybe make bubble instead
-Logger.getLogger(ThumbnailViewNode.class.getName()).log(Level.WARNING, "Error while trying to load the icon.", ex);
-}
-
-// create 75x75 image for the icon with the icon on the center
-BufferedImage combined = new BufferedImage(75, 75, BufferedImage.TYPE_INT_ARGB);
-Graphics2D g = (Graphics2D) combined.getGraphics();
-g.setColor(Color.WHITE);
-g.setBackground(Color.WHITE);
-g.drawImage(result, (75 - width) / 2, (75 - height) / 2, null);
-
-return Toolkit.getDefaultToolkit().createImage(combined.getSource());
 }

-private static BufferedImage toBufferedImage(Image src) {
-int w = src.getWidth(null);
-int h = src.getHeight(null);
-int type = BufferedImage.TYPE_INT_RGB; // other options
-BufferedImage dest = new BufferedImage(w, h, type);
-Graphics2D g2 = dest.createGraphics();
-g2.drawImage(src, 0, 0, null);
-g2.dispose();
-return dest;
-}
-
 private static File getFile(long id) {
 return new File(Case.getCurrentCase().getCacheDirectory() + File.separator + id + ".jpg");
 }

 }

@@ -28,5 +28,8 @@

 <!-- process and system monitoring, note: matching native libs pulled from thirdparty -->
 <dependency conf="autopsy_core->*" org="org.fusesource" name="sigar" rev="1.6.4" />

+<!-- better image resizing -->
+<dependency conf="autopsy_core->*" org="org.imgscalr" name="imgscalr-lib" rev="4.2" />
 </dependencies>
 </ivy-module>

@@ -12,6 +12,7 @@ file.reference.geronimo-jms_1.1_spec-1.0.jar=release/modules/ext/geronimo-jms_1.
 file.reference.gson-1.4.jar=release/modules/ext/gson-1.4.jar
 file.reference.gstreamer-java-1.5.jar=release/modules/ext/gstreamer-java-1.5.jar
 file.reference.guava-11.0.2.jar=release/modules/ext/guava-11.0.2.jar
+file.reference.imgscalr-lib-4.2.jar=release/modules/ext/imgscalr-lib-4.2.jar
 file.reference.javaee-api-5.0-2.jar=release/modules/ext/javaee-api-5.0-2.jar
 file.reference.javassist-3.12.1.GA.jar=release/modules/ext/javassist-3.12.1.GA.jar
 file.reference.jcalendarbutton-1.4.6.jar=release/modules/ext/jcalendarbutton-1.4.6.jar
@@ -29,6 +30,8 @@ file.reference.poi-ooxml-schemas-3.8.jar=release/modules/ext/poi-ooxml-schemas-3
 file.reference.poi-scratchpad-3.8.jar=release/modules/ext/poi-scratchpad-3.8.jar
 file.reference.reflections-0.9.8.jar=release/modules/ext/reflections-0.9.8.jar
 file.reference.servlet-api-2.5.jar=release/modules/ext/servlet-api-2.5.jar
+file.reference.sigar-1.6.4-sources.jar=release/modules/ext/sigar-1.6.4-sources.jar
+file.reference.sigar-1.6.4.jar=release/modules/ext/sigar-1.6.4.jar
 file.reference.slf4j-api-1.6.1.jar=release/modules/ext/slf4j-api-1.6.1.jar
 file.reference.slf4j-simple-1.6.1.jar=release/modules/ext/slf4j-simple-1.6.1.jar
 file.reference.stax-api-1.0.1.jar=release/modules/ext/stax-api-1.0.1.jar

@@ -661,6 +661,7 @@
 <package>org.hyperic.sigar.util</package>
 <package>org.hyperic.sigar.vmware</package>
 <package>org.hyperic.sigar.win32</package>
+<package>org.imgscalr</package>
 <package>org.jbundle.thin.base.screen.jcalendarbutton</package>
 <package>org.openxmlformats.schemas.drawingml.x2006.chart</package>
 <package>org.openxmlformats.schemas.drawingml.x2006.chart.impl</package>
@@ -774,6 +775,10 @@
 <runtime-relative-path>ext/ant-1.8.2.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/ant-1.8.2.jar</binary-origin>
 </class-path-extension>
+<class-path-extension>
+<runtime-relative-path>ext/commons-lang-2.4-javadoc.jar</runtime-relative-path>
+<binary-origin>release/modules/ext/commons-lang-2.4-javadoc.jar</binary-origin>
+</class-path-extension>
 <class-path-extension>
 <runtime-relative-path>ext/stax-api-1.0.1.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/stax-api-1.0.1.jar</binary-origin>
@@ -786,6 +791,14 @@
 <runtime-relative-path>ext/reflections-0.9.8.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/reflections-0.9.8.jar</binary-origin>
 </class-path-extension>
+<class-path-extension>
+<runtime-relative-path>ext/imgscalr-lib-4.2-sources.jar</runtime-relative-path>
+<binary-origin>release/modules/ext/imgscalr-lib-4.2-sources.jar</binary-origin>
+</class-path-extension>
+<class-path-extension>
+<runtime-relative-path>ext/imgscalr-lib-4.2-javadoc.jar</runtime-relative-path>
+<binary-origin>release/modules/ext/imgscalr-lib-4.2-javadoc.jar</binary-origin>
+</class-path-extension>
 <class-path-extension>
 <runtime-relative-path>ext/jna-3.4.0.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/jna-3.4.0.jar</binary-origin>
@@ -858,6 +871,10 @@
 <runtime-relative-path>ext/commons-codec-1.5.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/commons-codec-1.5.jar</binary-origin>
 </class-path-extension>
+<class-path-extension>
+<runtime-relative-path>ext/imgscalr-lib-4.2.jar</runtime-relative-path>
+<binary-origin>release/modules/ext/imgscalr-lib-4.2.jar</binary-origin>
+</class-path-extension>
 <class-path-extension>
 <runtime-relative-path>ext/poi-ooxml-schemas-3.8.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/poi-ooxml-schemas-3.8.jar</binary-origin>
@@ -870,6 +887,10 @@
 <runtime-relative-path>ext/guava-11.0.2.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/guava-11.0.2.jar</binary-origin>
 </class-path-extension>
+<class-path-extension>
+<runtime-relative-path>ext/commons-lang-2.4-sources.jar</runtime-relative-path>
+<binary-origin>release/modules/ext/commons-lang-2.4-sources.jar</binary-origin>
+</class-path-extension>
 <class-path-extension>
 <runtime-relative-path>ext/poi-excelant-3.8.jar</runtime-relative-path>
 <binary-origin>release/modules/ext/poi-excelant-3.8.jar</binary-origin>

@@ -0,0 +1,51 @@
+/*
+* Autopsy Forensic Browser
+*
+* Copyright 2013 Basis Technology Corp.
+* Contact: carrier <at> sleuthkit <dot> org
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.sleuthkit.autopsy.corelibs;
+
+import java.awt.image.BufferedImage;
+import org.imgscalr.Scalr;
+import org.imgscalr.Scalr.Method;
+
+/**
+* Scalr wrapper to deal with exports and provide thread-safety
+*
+*/
+public class ScalrWrapper {
+
+public static synchronized BufferedImage resize(BufferedImage input, int width, int height) {
+return Scalr.resize(input, width, height, Scalr.OP_ANTIALIAS);
+}
+
+public static synchronized BufferedImage resize(BufferedImage input, int size) {
+return Scalr.resize(input, size, Scalr.OP_ANTIALIAS);
+}
+
+public static synchronized BufferedImage resizeHighQuality(BufferedImage input, int width, int height) {
+return Scalr.resize(input, Method.QUALITY, width, height, Scalr.OP_ANTIALIAS);
+}
+
+public static synchronized BufferedImage resizeFast(BufferedImage input, int size) {
+return Scalr.resize(input, Method.SPEED, Scalr.Mode.AUTOMATIC, size, Scalr.OP_ANTIALIAS);
+}
+
+public static synchronized BufferedImage resizeFast(BufferedImage input, int width, int height) {
+return Scalr.resize(input, Method.SPEED, Scalr.Mode.AUTOMATIC, width, height, Scalr.OP_ANTIALIAS);
+}
+}

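ScalrWrapper is consumed elsewhere in this commit: resizeHighQuality in DataContentViewerMedia and resizeFast(bi, 100, 100) in ThumbnailViewNode. A small, hypothetical standalone usage sketch (class name, file paths, and sizes are placeholders, not part of the commit):

import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;
import org.sleuthkit.autopsy.corelibs.ScalrWrapper;

public class ScalrWrapperUsageSketch {
    public static void main(String[] args) throws Exception {
        // Read any ImageIO-supported image from disk (placeholder path).
        BufferedImage original = ImageIO.read(new File(args[0]));
        // Speed-oriented scaling, as the thumbnail node does for its 100x100 cache entries.
        BufferedImage thumbnail = ScalrWrapper.resizeFast(original, 100, 100);
        // Quality-oriented scaling, as the media viewer does to fit its panel dimensions.
        BufferedImage fitted = ScalrWrapper.resizeHighQuality(original, 800, 600);
        ImageIO.write(thumbnail, "png", new File("thumbnail.png"));
        ImageIO.write(fitted, "png", new File("fitted.png"));
    }
}

Per the class javadoc, the synchronized wrappers give Swing, JavaFX, and ingest threads one shared entry point to imgscalr.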
@@ -16,9 +16,9 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-
 package org.sleuthkit.autopsy.keywordsearch;

+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT;
@@ -29,72 +29,124 @@ import org.sleuthkit.datamodel.AbstractFile;
 * chunks
 */
 interface AbstractFileExtract {

 /**
 * Common options that can be used by some extractors
 */
 enum ExtractOptions {

 EXTRACT_UTF16, ///< extract UTF16 text, possible values Boolean.TRUE.toString(), Boolean.FALSE.toString()
 EXTRACT_UTF8, ///< extract UTF8 text, possible values Boolean.TRUE.toString(), Boolean.FALSE.toString()
 };

+//generally text extractors should ignore archives
+//and let unpacking modules take case of them
+static final List<String> ARCHIVE_MIME_TYPES =
+Arrays.asList(
+//ignore unstructured binary and compressed data, for which string extraction or unzipper works better
+"application/x-7z-compressed",
+"application/x-ace-compressed",
+"application/x-alz-compressed",
+"application/x-arj",
+"application/vnd.ms-cab-compressed",
+"application/x-cfs-compressed",
+"application/x-dgc-compressed",
+"application/x-apple-diskimage",
+"application/x-gca-compressed",
+"application/x-dar",
+"application/x-lzx",
+"application/x-lzh",
+"application/x-rar-compressed",
+"application/x-stuffit",
+"application/x-stuffitx",
+"application/x-gtar",
+"application/x-archive",
+"application/x-executable",
+"application/x-gzip",
+"application/zip",
+"application/x-zoo",
+"application/x-cpio",
+"application/x-shar",
+"application/x-tar",
+"application/x-bzip",
+"application/x-bzip2",
+"application/x-lzip",
+"application/x-lzma",
+"application/x-lzop",
+"application/x-z",
+"application/x-compress");
+
 /**
 * Get number of chunks resulted from extracting this AbstractFile
+*
 * @return the number of chunks produced
 */
 int getNumChunks();

 /**
 * Get the source file associated with this extraction
+*
 * @return the source AbstractFile
 */
 AbstractFile getSourceFile();

 /**
 * Index the Abstract File
+*
 * @param sourceFile file to index
 * @return true if indexed successfully, false otherwise
 * @throws org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException
 */
 boolean index(AbstractFile sourceFile) throws Ingester.IngesterException;

 /**
 * Sets the scripts to use for the extraction
+*
 * @param extractScripts scripts to use
-* @return true if extractor supports script - specific extraction, false otherwise
+* @return true if extractor supports script - specific extraction, false
+* otherwise
 */
 boolean setScripts(List<SCRIPT> extractScript);

 /**
 * Get the currently used scripts for extraction
+*
 * @return scripts currently used or null if not supported
 */
 List<SCRIPT> getScripts();

 /**
 * Get current options
-* @return currently used, extractor specific options, or null of not supported
+*
+* @return currently used, extractor specific options, or null of not
+* supported
 */
-Map<String,String> getOptions();
+Map<String, String> getOptions();

 /**
 * Set extractor specific options
+*
 * @param options options to use
 */
-void setOptions(Map<String,String> options);
+void setOptions(Map<String, String> options);

 /**
-* Determines if the extractor works only for specified types
-* is supportedTypes() or whether is a generic content extractor (such as string extractor)
-* @return
+* Determines if the extractor works only for specified types is
+* supportedTypes() or whether is a generic content extractor (such as
+* string extractor)
+*
+* @return
 */
 boolean isContentTypeSpecific();

 /**
-* Determines if the file content is supported by the extractor,
-* if isContentTypeSpecific() returns true.
+* Determines if the file content is supported by the extractor if
+* isContentTypeSpecific() returns true.
+*
 * @param file to test if its content should be supported
+* @param detectedFormat mime-type with detected format (such as text/plain)
+* or null if not detected
 * @return true if the file content is supported, false otherwise
 */
-boolean isSupported(AbstractFile file);
+boolean isSupported(AbstractFile file, String detectedFormat);
 }

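The interface now receives the Tika-detected MIME type instead of relying on file extensions, and exposes the shared ARCHIVE_MIME_TYPES list so extractors can defer archives to the unpacking module. A minimal, hypothetical sketch of the decision an implementor makes in the new isSupported(AbstractFile, String); the class below is illustrative only, and the real implementations follow in the hunks after it:

package org.sleuthkit.autopsy.keywordsearch;

import org.sleuthkit.datamodel.AbstractFile;

// Illustrative only: shows the shape of the new contract, not a class from the commit.
class ExampleMimeAwareExtract {

    boolean isSupported(AbstractFile file, String detectedFormat) {
        if (detectedFormat == null) {
            return false; // nothing detected; the string extractor is the fallback
        }
        if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) {
            return false; // archives are left to the unpacking module
        }
        // A content-specific extractor keys off the detected type, not the extension.
        return detectedFormat.startsWith("text/");
    }
}

The concrete HTML, string, and Tika extractors below apply this same pattern with their own accept and reject lists.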
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.Reader;
 import java.nio.charset.Charset;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Level;
@@ -49,9 +50,15 @@ public class AbstractFileHtmlExtract implements AbstractFileExtract {
 private AbstractFile sourceFile;
 private int numChunks = 0;
 //private static final String UTF16BOM = "\uFEFF"; disabled prepending of BOM
-private static final String[] SUPPORTED_EXTENSIONS = {
-"htm", "html", "xhtml", "shtml", "xhtm", "shtm", "css", "js", "php", "jsp"
-};
+static final List<String> WEB_MIME_TYPES = Arrays.asList(
+"application/javascript",
+"application/xhtml+xml",
+"application/json",
+"text/css",
+"text/html",
+"text/javascript" //"application/xml",
+//"application/xml-dtd",
+);
+
 AbstractFileHtmlExtract() {
 this.module = KeywordSearchIngestModule.getDefault();
@@ -67,7 +74,7 @@ public class AbstractFileHtmlExtract implements AbstractFileExtract {
 public List<SCRIPT> getScripts() {
 return null;
 }

 @Override
 public Map<String, String> getOptions() {
 return null;
@@ -75,7 +82,6 @@ public class AbstractFileHtmlExtract implements AbstractFileExtract {

 @Override
 public void setOptions(Map<String, String> options) {
-
 }

 @Override
@@ -207,13 +213,16 @@ public class AbstractFileHtmlExtract implements AbstractFileExtract {
 }

 @Override
-public boolean isSupported(AbstractFile file) {
-String fileNameLower = file.getName().toLowerCase();
-for (int i = 0; i < SUPPORTED_EXTENSIONS.length; ++i) {
-if (fileNameLower.endsWith(SUPPORTED_EXTENSIONS[i])) {
-return true;
-}
+public boolean isSupported(AbstractFile file, String detectedFormat) {
+if (detectedFormat == null) {
+return false;
 }
-return false;
+else if (WEB_MIME_TYPES.contains(detectedFormat) ) {
+return true;
+}
+else {
+return false;
+}
+
 }
 }

@@ -51,13 +51,7 @@ class AbstractFileStringExtract implements AbstractFileExtract {
 private static final SCRIPT DEFAULT_SCRIPT = SCRIPT.LATIN_2;
 private final List<SCRIPT> extractScripts = new ArrayList<SCRIPT>();
 private Map<String, String> extractOptions = new HashMap<String, String>();
-//string extractor extracts from all other than archives
-//TODO use content type detection mechanism
-static final String[] UNSUPPORTED_EXTENSIONS = {
-//Archives
-//Note: archive unpacker module will process these instead
-"tar", "jar", "zip", "7z", "gzip", "bzip", "bzip2", "gz", "tgz", "cab", "rar", "arj", "dmg", "iso"
-};

 //disabled prepending of BOM
 //static {
@@ -185,18 +179,26 @@ class AbstractFileStringExtract implements AbstractFileExtract {
 }

 @Override
-public boolean isSupported(AbstractFile file) {
-String fileNameLower = file.getName().toLowerCase();
-int dotI = fileNameLower.lastIndexOf(".");
-if (dotI == -1 || dotI == fileNameLower.length() - 1) {
-return true; //no extension
+public boolean isSupported(AbstractFile file, String detectedFormat) {
+if (detectedFormat == null) {
+return true;
 }
-final String extension = fileNameLower.substring(dotI + 1);
-for (int i = 0; i < UNSUPPORTED_EXTENSIONS.length; ++i) {
-if (extension.equals(UNSUPPORTED_EXTENSIONS[i])) {
-return false;
-}
+else if (detectedFormat.equals("application/octet-stream")) {
+//any binary unstructured blobs (string extraction will be used)
+return true;
+}
+else if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) {
+return false; //let unzipper take care of it
+}
+//skip images/video/audio
+else if (detectedFormat.contains("image/")
+|| detectedFormat.contains("audio/")
+|| detectedFormat.contains("video/")
+) {
+return false;
+}
+else {
+return true;
 }
-return true;
 }
 }

@@ -66,30 +66,6 @@ public class AbstractFileTikaTextExtract implements AbstractFileExtract {
 private int numChunks = 0;
 //private static final String UTF16BOM = "\uFEFF"; disabled prepending of BOM
 private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor();
-// TODO: use type detection mechanism instead, and maintain supported MimeTypes, not extensions
-// supported extensions list from http://www.lucidimagination.com/devzone/technical-articles/content-extraction-tika
-static final String[] SUPPORTED_EXTENSIONS = {
-//Archive (to be removed when we have archive module
-/// handled by 7zip module now "tar", "jar", "zip", "gzip", "bzip2", "gz", "tgz", "ar", "cpio",
-//MS Office
-"doc", "dot", "docx", "docm", "dotx", "dotm",
-"xls", "xlw", "xlt", "xlsx", "xlsm", "xltx", "xltm",
-"ppt", "pps", "pot", "pptx", "pptm", "potx", "potm",
-//Open Office
-"odf", "odt", "ott", "ods", "ots", "odp", "otp",
-"sxw", "stw", "sxc", "stc", "sxi", "sxi",
-"sdw", "sdc", "vor", "sgl",
-//rich text, pdf
-"rtf", "pdf",
-//html (other extractors take priority)
-"html", "htm", "xhtml",
-//text
-"txt", "log", "manifest",
-//code
-"class",
-//images, media, other
-"bmp", "gif", "png", "jpeg", "jpg", "tiff", "mp3", "flv", "aiff", "au", "midi", "wav",
-"pst", "xml", "class", "dwg", "eml", "emlx", "mbox", "mht"};

 AbstractFileTikaTextExtract() {
 this.module = KeywordSearchIngestModule.getDefault();
@@ -282,19 +258,27 @@ public class AbstractFileTikaTextExtract implements AbstractFileExtract {
 }

 @Override
-public boolean isSupported(AbstractFile file) {
-String fileNameLower = file.getName().toLowerCase();
-int dotI = fileNameLower.lastIndexOf(".");
-if (dotI == -1 || dotI == fileNameLower.length() - 1) {
-return false; //no extension
+public boolean isSupported(AbstractFile file, String detectedFormat) {
+if (detectedFormat == null) {
+return false;
+} else if (detectedFormat.equals("application/octet-stream")) {
+//any binary unstructured blobs (string extraction will be used)
+return false;
+} else if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) {
+return false;
+} //skip video other than flv (tika supports flv only)
+else if (detectedFormat.contains("video/")
+&& !detectedFormat.equals("video/x-flv")) {
+return false;
 }
-final String extension = fileNameLower.substring(dotI + 1);
-for (int i = 0; i < SUPPORTED_EXTENSIONS.length; ++i) {
-if (extension.equals(SUPPORTED_EXTENSIONS[i])) {
-return true;
-}
-}
-return false;
+//TODO might need to add more mime-types to ignore
+//default to true, which includes
+//text, docs, pdf and others
+return true;

 }
 }

 /**
@ -20,6 +20,8 @@ package org.sleuthkit.autopsy.keywordsearch;
|
|||||||
|
|
||||||
import java.awt.event.ActionEvent;
|
import java.awt.event.ActionEvent;
|
||||||
import java.awt.event.ActionListener;
|
import java.awt.event.ActionListener;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
import java.lang.Long;
|
import java.lang.Long;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
@ -37,10 +39,12 @@ import org.sleuthkit.autopsy.coreutils.Logger;
|
|||||||
import javax.swing.SwingUtilities;
|
import javax.swing.SwingUtilities;
|
||||||
import javax.swing.SwingWorker;
|
import javax.swing.SwingWorker;
|
||||||
import javax.swing.Timer;
|
import javax.swing.Timer;
|
||||||
|
import org.apache.tika.Tika;
|
||||||
import org.netbeans.api.progress.aggregate.AggregateProgressFactory;
|
import org.netbeans.api.progress.aggregate.AggregateProgressFactory;
|
||||||
import org.netbeans.api.progress.aggregate.AggregateProgressHandle;
|
import org.netbeans.api.progress.aggregate.AggregateProgressHandle;
|
||||||
import org.netbeans.api.progress.aggregate.ProgressContributor;
|
import org.netbeans.api.progress.aggregate.ProgressContributor;
|
||||||
import org.openide.util.Cancellable;
|
import org.openide.util.Cancellable;
|
||||||
|
import org.openide.util.Exceptions;
|
||||||
import org.sleuthkit.autopsy.casemodule.Case;
|
import org.sleuthkit.autopsy.casemodule.Case;
|
||||||
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
|
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
|
||||||
import org.sleuthkit.autopsy.coreutils.StopWatch;
|
import org.sleuthkit.autopsy.coreutils.StopWatch;
|
||||||
@ -57,6 +61,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
|
|||||||
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
|
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
|
||||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||||
import org.sleuthkit.datamodel.AbstractFile;
|
import org.sleuthkit.datamodel.AbstractFile;
|
||||||
|
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||||
import org.sleuthkit.datamodel.TskCoreException;
|
import org.sleuthkit.datamodel.TskCoreException;
|
||||||
import org.sleuthkit.datamodel.TskData;
|
import org.sleuthkit.datamodel.TskData;
|
||||||
@ -123,6 +128,7 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
|
|||||||
private static AbstractFileStringExtract stringExtractor;
|
private static AbstractFileStringExtract stringExtractor;
|
||||||
private boolean initialized = false;
|
private boolean initialized = false;
|
||||||
private KeywordSearchConfigurationPanel panel;
|
private KeywordSearchConfigurationPanel panel;
|
||||||
|
private Tika tikaFormatDetector;
|
||||||
|
|
||||||
private enum IngestStatus {
|
private enum IngestStatus {
|
||||||
|
|
||||||
@ -147,7 +153,7 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ProcessResult process(PipelineContext<IngestModuleAbstractFile>pipelineContext, AbstractFile abstractFile) {
|
public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile abstractFile) {
|
||||||
|
|
||||||
if (initialized == false) //error initializing indexing/Solr
|
if (initialized == false) //error initializing indexing/Solr
|
||||||
{
|
{
|
||||||
@ -189,7 +195,6 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
|
|||||||
return ProcessResult.OK;
|
return ProcessResult.OK;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* After all files are ingested, execute final index commit and final search
|
* After all files are ingested, execute final index commit and final search
|
||||||
* Cleanup resources, threads, timers
|
* Cleanup resources, threads, timers
|
||||||
@ -297,6 +302,8 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
|
|||||||
keywordLists.clear();
|
keywordLists.clear();
|
||||||
keywordToList.clear();
|
keywordToList.clear();
|
||||||
|
|
||||||
|
tikaFormatDetector = null;
|
||||||
|
|
||||||
initialized = false;
|
initialized = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -338,6 +345,8 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
|
|||||||
|
|
||||||
caseHandle = Case.getCurrentCase().getSleuthkitCase();
|
caseHandle = Case.getCurrentCase().getSleuthkitCase();
|
||||||
|
|
||||||
|
tikaFormatDetector = new Tika();
|
||||||
|
|
||||||
ingester = Server.getIngester();
|
ingester = Server.getIngester();
|
||||||
|
|
||||||
final Server server = KeywordSearch.getServer();
|
final Server server = KeywordSearch.getServer();
|
||||||
@ -659,18 +668,20 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
|
|||||||
* index
|
* index
|
||||||
* @param stringsOnly true if use string extraction, false if to use a
|
* @param stringsOnly true if use string extraction, false if to use a
|
||||||
* content-type specific text extractor
|
* content-type specific text extractor
|
||||||
|
* @param detectedFormat mime-type detected, or null if none detected
|
||||||
* @return true if the file was indexed, false otherwise
|
* @return true if the file was indexed, false otherwise
|
||||||
* @throws IngesterException exception thrown if indexing failed
|
* @throws IngesterException exception thrown if indexing failed
|
||||||
*/
|
*/
|
||||||
private boolean extractIndex(AbstractFile aFile, boolean stringsOnly) throws IngesterException {
|
private boolean extractIndex(AbstractFile aFile, boolean stringsOnly, String detectedFormat) throws IngesterException {
|
||||||
AbstractFileExtract fileExtract = null;
|
AbstractFileExtract fileExtract = null;
|
||||||
|
|
||||||
if (stringsOnly && stringExtractor.isSupported(aFile)) {
|
if (stringsOnly && stringExtractor.isSupported(aFile, detectedFormat)) {
|
||||||
fileExtract = stringExtractor;
|
fileExtract = stringExtractor;
|
||||||
} else {
|
} else {
|
||||||
//go over available text extractors and pick the first one (most specific one)
|
//not only strings
|
||||||
|
//go over available text extractors in order, and pick the first one (most specific one)
|
||||||
for (AbstractFileExtract fe : textExtractors) {
|
for (AbstractFileExtract fe : textExtractors) {
|
||||||
if (fe.isSupported(aFile)) {
|
if (fe.isSupported(aFile, detectedFormat)) {
|
||||||
fileExtract = fe;
|
fileExtract = fe;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -678,7 +689,8 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (fileExtract == null) {
|
if (fileExtract == null) {
|
||||||
logger.log(Level.INFO, "No supported file extractor found for file: " + aFile.getId() + " " + aFile.getName());
|
logger.log(Level.INFO, "No text extractor found for file id:"
|
||||||
|
+ aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -688,10 +700,16 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
         return fileExtract.index(aFile);
     }
 
-    private boolean isTextExtractSupported(AbstractFile aFile) {
+    /**
+     * Check with every extractor if it supports the file with the detected format
+     * @param aFile file to check for
+     * @param detectedFormat mime-type with detected format (such as text/plain) or null if not detected
+     * @return true if text extraction is supported
+     */
+    private boolean isTextExtractSupported(AbstractFile aFile, String detectedFormat) {
         for (AbstractFileExtract extractor : textExtractors) {
             if (extractor.isContentTypeSpecific() == true
-                    && extractor.isSupported(aFile)) {
+                    && extractor.isSupported(aFile, detectedFormat)) {
                 return true;
             }
         }
@@ -706,8 +724,8 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
         if (aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) {
             //skip indexing of virtual dirs (no content, no real name) - will index children files
             return;
         }
 
         boolean isUnallocFile = aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
 
         final long size = aFile.getSize();
@@ -725,18 +743,39 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
             return;
         }
 
-        boolean extractTextSupported = isTextExtractSupported(aFile);
+        //use Tika to detect the format
+        String detectedFormat = null;
+        InputStream is = null;
+        try {
+            is = new ReadContentInputStream(aFile);
+            detectedFormat = tikaFormatDetector.detect(is, aFile.getName());
+
+        } catch (Exception e) {
+            logger.log(Level.WARNING, "Could not detect format using tika for file: " + aFile, e);
+        } finally {
+            if (is != null) {
+                try {
+                    is.close();
+                } catch (IOException ex) {
+                    logger.log(Level.WARNING, "Could not close stream after detecting format using tika for file: "
+                            + aFile, ex);
+                }
+            }
+        }
+        logger.log(Level.INFO, "Detected format: " + aFile.getName() + " " + detectedFormat);
+
+        boolean extractTextSupported = isTextExtractSupported(aFile, detectedFormat);
         if (isUnallocFile == false && extractTextSupported) {
             //we know it's an allocated FS file
             //extract text with one of the extractors, divide into chunks and index with Solr
             try {
                 //logger.log(Level.INFO, "indexing: " + aFile.getName());
-                if (!extractIndex(aFile, false)) {
+                if (!extractIndex(aFile, false, detectedFormat)) {
                     logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").");
                     ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
                     //try to extract strings, if a file
                     if (aFile.isFile() == true) {
-                        processNonIngestible(aFile);
+                        processNonIngestible(aFile, detectedFormat);
                     }
 
                 } else {
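The detection block added above opens a ReadContentInputStream, asks Tika for the mime-type from the stream content plus the file name as a hint, and closes the stream in a finally block. As a rough standalone sketch of the same call pattern (not the module's code; it assumes org.apache.tika.Tika and the Sleuth Kit's ReadContentInputStream, the class and method names are illustrative, and try-with-resources replaces the explicit finally shown in the diff):

import java.io.IOException;
import java.io.InputStream;
import org.apache.tika.Tika;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ReadContentInputStream;

final class FormatDetectionSketch {
    // A single Tika instance is kept and reused, matching the tikaFormatDetector
    // field set up earlier in the diff.
    private final Tika tika = new Tika();

    /**
     * Returns the detected mime-type (e.g. "text/plain"), or null if the
     * content could not be read; Tika itself falls back to
     * "application/octet-stream" when it cannot narrow the type down.
     */
    String detectFormat(AbstractFile file) {
        try (InputStream in = new ReadContentInputStream(file)) {
            return tika.detect(in, file.getName());
        } catch (IOException ex) {
            return null;
        }
    }
}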
@@ -749,7 +788,7 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
                     ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
                     //try to extract strings, if a file
                     if (aFile.isFile() == true) {
-                        processNonIngestible(aFile);
+                        processNonIngestible(aFile, detectedFormat);
                     }
 
             } catch (Exception e) {
@@ -758,18 +797,18 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
                 ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
                 //try to extract strings if a file
                 if (aFile.isFile() == true) {
-                    processNonIngestible(aFile);
+                    processNonIngestible(aFile, detectedFormat);
                 }
             }
         } else {
             //unallocated file or unsupported content type by Solr
-            processNonIngestible(aFile);
+            processNonIngestible(aFile, detectedFormat);
         }
     }
 
-    private boolean processNonIngestible(AbstractFile aFile) {
+    private boolean processNonIngestible(AbstractFile aFile, String detectedFormat) {
        try {
-            if (!extractIndex(aFile, true)) {
+            if (!extractIndex(aFile, true, detectedFormat)) {
                 logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").");
                 ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
                 return false;
@@ -1025,7 +1064,7 @@ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile
                 detailsSb.append("<tr>");
                 detailsSb.append("<th>File</th>");
                 detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>");
 
                 detailsSb.append("</tr>");
 
 
NEWS.txt
@@ -6,6 +6,8 @@ New features:
 
 Improvements:
 - Sleuthkit-4.0.2 and libewf-20130128
+- improved image loading in Media View and Thumbnail View (faster loading, handles large files better)
+- improve Keyword Search file indexing (decision whether to index using detected mime-type instead of file extension)
 - show children counts in directory tree
 
 Bugfixes:
@@ -16,6 +18,7 @@ Bugfixes:
 - exif module better jpeg detection using signature and not only file extension.
 - The "media view" tab is inactive for deleted files (#165)
 
+
 ---------------- VERSION 3.0.4 --------------
 
 New features:
@@ -1,4 +1,9 @@
+OpenIDE-Module-Display-Category=External Viewers
+OpenIDE-Module-Long-Description=\
+Displays user activity as an interactive timeline chart with year, month and day granularity. \n\
+Events for a selected day are viewable in the built-in result and content viewers.
 OpenIDE-Module-Name=Timeline
 CTL_MakeTimeline="Make Timeline (Beta)"
+OpenIDE-Module-Short-Description=Displays user activity timeline
 TimelineProgressDialog.jLabel1.text=Creating timeline . . .
 TimelineFrame.title=Timeline