remove more unneeded ContentStreams and cleanup logging

This commit is contained in:
millmanorama 2016-12-14 13:31:58 +01:00
parent 0303c96d41
commit b904c37dd2
6 changed files with 11 additions and 168 deletions

View File

@ -18,19 +18,15 @@
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.logging.Level;
import org.apache.commons.io.IOUtils;
import org.apache.solr.common.util.ContentStream;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import static org.sleuthkit.autopsy.keywordsearch.Bundle.ByteArtifactStream_getSrcInfo_text;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
@ -39,6 +35,7 @@ import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
public class ArtifactExtractor extends TextExtractor<Void, BlackboardArtifact> {
static final private Logger logger = Logger.getLogger(ArtifactExtractor.class.getName());
static Content getDataSource(BlackboardArtifact artifact) throws TskCoreException {
Content dataSource;
@ -76,9 +73,8 @@ public class ArtifactExtractor extends TextExtractor<Void, BlackboardArtifact> {
/**
 * Logs a warning encountered while extracting text from an artifact.
 *
 * BUGFIX: the method previously threw UnsupportedOperationException
 * (leftover IDE-generated stub) before the logging call, making the
 * logger.log line unreachable and aborting every caller.
 *
 * @param msg warning message to record
 * @param ex  exception that triggered the warning (may carry the cause)
 */
@Override
void logWarning(String msg, Exception ex) {
    logger.log(Level.WARNING, msg, ex); //NON-NLS
}
@Override
Void newAppendixProvider() {
return null;
@ -122,7 +118,6 @@ public class ArtifactExtractor extends TextExtractor<Void, BlackboardArtifact> {
return null;
}
return IOUtils.toInputStream(artifactContents);
}
@ -137,75 +132,8 @@ public class ArtifactExtractor extends TextExtractor<Void, BlackboardArtifact> {
return source.getArtifactID();
}
/**
 * Returns the human-readable name for an artifact text source.
 *
 * @param source the artifact whose text is being indexed
 * @return the artifact's display name
 */
@Override
String getName(BlackboardArtifact source) {
    final String displayName = source.getDisplayName();
    return displayName;
}
static private class ByteArtifactStream implements ContentStream {
//input
private final byte[] content; //extracted subcontent
private long contentSize;
private final BlackboardArtifact aContent; //origin
private final InputStream stream;
private static final Logger logger = Logger.getLogger(ByteArtifactStream.class.getName());
public ByteArtifactStream(byte[] content, long contentSize, BlackboardArtifact aContent) {
this.content = content;
this.aContent = aContent;
stream = new ByteArrayInputStream(content, 0, (int) contentSize);
}
public byte[] getByteContent() {
return content;
}
public BlackboardArtifact getSourceContent() {
return aContent;
}
@Override
public String getContentType() {
return "text/plain;charset=" + Server.DEFAULT_INDEXED_TEXT_CHARSET.name(); //NON-NLS
}
@Override
public String getName() {
return aContent.getDisplayName();
}
@Override
public Reader getReader() throws IOException {
return new InputStreamReader(stream);
}
@Override
public Long getSize() {
return contentSize;
}
@Override
@NbBundle.Messages("ByteArtifactStream.getSrcInfo.text=Artifact:{0}")
public String getSourceInfo() {
return ByteArtifactStream_getSrcInfo_text(aContent.getArtifactID());
}
@Override
public InputStream getStream() throws IOException {
return stream;
}
@Override
protected void finalize() throws Throwable {
super.finalize();
stream.close();
}
return source.getDisplayName() + "_" + source.getArtifactID();
}
}

View File

@ -18,17 +18,8 @@
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Arrays;
import java.util.List;
import org.apache.solr.common.util.ContentStream;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractContent;
import org.sleuthkit.datamodel.AbstractFile;
/**
@ -120,68 +111,4 @@ abstract class FileTextExtractor<AppendixProvider> extends TextExtractor<Appendi
/**
 * Returns the human-readable name for a file text source.
 *
 * @param source the file whose text is being indexed
 * @return the file's name
 */
String getName(AbstractFile source) {
    final String fileName = source.getName();
    return fileName;
}
/**
 * Adapts a byte array of text extracted from an AbstractContent into a
 * Solr ContentStream so the text can be posted to the server for indexing.
 *
 * BUGFIX: contentSize was declared but never assigned in the constructor,
 * so getSize() always reported 0. The unused private logger field was
 * also removed.
 */
private static class ByteContentStream implements ContentStream {

    private final byte[] content; //extracted subcontent
    private final long contentSize; //number of valid bytes at the start of content
    private final AbstractContent aContent; //origin of the extracted text
    private final InputStream stream; //view over the first contentSize bytes

    /**
     * @param content     buffer holding the extracted text
     * @param contentSize number of valid bytes in content (truncated to int
     *                    for the backing ByteArrayInputStream)
     * @param aContent    content object the text was extracted from
     */
    public ByteContentStream(byte[] content, long contentSize, AbstractContent aContent) {
        this.content = content;
        this.contentSize = contentSize; //was missing; getSize() always returned 0
        this.aContent = aContent;
        stream = new ByteArrayInputStream(content, 0, (int) contentSize);
    }

    public byte[] getByteContent() {
        return content;
    }

    public AbstractContent getSourceContent() {
        return aContent;
    }

    @Override
    public String getContentType() {
        //indexed text is always sent as plain text in the server's charset
        return "text/plain;charset=" + Server.DEFAULT_INDEXED_TEXT_CHARSET.name(); //NON-NLS
    }

    @Override
    public String getName() {
        return aContent.getName();
    }

    @Override
    public Reader getReader() throws IOException {
        // NOTE(review): uses the platform default charset, which may disagree
        // with the charset advertised by getContentType() — confirm intent.
        return new InputStreamReader(stream);
    }

    @Override
    public Long getSize() {
        return contentSize;
    }

    @Override
    public String getSourceInfo() {
        return NbBundle.getMessage(this.getClass(), "ByteContentStream.getSrcInfo.text", aContent.getId());
    }

    @Override
    public InputStream getStream() throws IOException {
        return stream;
    }

    @Override
    protected void finalize() throws Throwable {
        super.finalize();
        //closing a ByteArrayInputStream is a no-op; kept for symmetry
        stream.close();
    }
}
}

View File

@ -24,13 +24,11 @@ import java.io.Reader;
import java.io.StringReader;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import net.htmlparser.jericho.Attributes;
import net.htmlparser.jericho.Renderer;
import net.htmlparser.jericho.Source;
import net.htmlparser.jericho.StartTag;
import net.htmlparser.jericho.StartTagType;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
@ -41,8 +39,6 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
*/
class HtmlTextExtractor extends FileTextExtractor<Void> {
private static final Logger logger = Logger.getLogger(HtmlTextExtractor.class.getName());
static final int MAX_EXTR_TEXT_CHARS = 512 * 1024;
private static final int MAX_SIZE = 50000000;
@ -58,10 +54,6 @@ class HtmlTextExtractor extends FileTextExtractor<Void> {
HtmlTextExtractor() {
}
/** Logs a warning from HTML text extraction to this extractor's logger. */
@Override
void logWarning(final String msg, Exception ex) {
logger.log(Level.WARNING, msg, ex); //NON-NLS
}
@Override
boolean isContentTypeSpecific() {

View File

@ -25,7 +25,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.StringExtract;
import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT;
@ -40,7 +39,6 @@ import org.sleuthkit.datamodel.TskException;
*/
class StringsTextExtractor extends FileTextExtractor<Void> {
private static final Logger logger = Logger.getLogger(StringsTextExtractor.class.getName());
private static final long MAX_STRING_CHUNK_SIZE = 1 * 1024 * 1024L;
private final List<SCRIPT> extractScripts = new ArrayList<>();
private Map<String, String> extractOptions = new HashMap<>();
@ -141,10 +139,6 @@ class StringsTextExtractor extends FileTextExtractor<Void> {
return true;
}
/** Logs a warning from string extraction to this extractor's logger. */
@Override
void logWarning(String msg, Exception ex) {
logger.log(Level.WARNING, msg, ex); //NON-NLS
}
@Override
Void newAppendixProvider() {

View File

@ -20,13 +20,18 @@ package org.sleuthkit.autopsy.keywordsearch;
import java.io.InputStream;
import java.io.Reader;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.SleuthkitVisitableItem;
abstract class TextExtractor<AppendixProvider, TextSource extends SleuthkitVisitableItem> {
static final private Logger logger = Logger.getLogger(TextExtractor.class.getName());
abstract boolean noExtractionOptionsAreEnabled();
abstract void logWarning(final String msg, Exception ex);
/**
 * Default warning logging for text extractors; subclasses may override to
 * route warnings elsewhere (e.g., TikaTextExtractor also logs to the Tika log).
 */
void logWarning(String msg, Exception ex) {
logger.log(Level.WARNING, msg, ex); //NON-NLS
}
void appendDataToFinalChunk(StringBuilder sb, AppendixProvider dataProvider) {
//no-op

View File

@ -34,7 +34,6 @@ import org.apache.tika.Tika;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.ParseContext;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
@ -51,9 +50,7 @@ import org.sleuthkit.datamodel.ReadContentInputStream;
*/
class TikaTextExtractor extends FileTextExtractor<Metadata> {
private static final Logger logger = Logger.getLogger(TikaTextExtractor.class.getName());
private static final int MAX_EXTR_TEXT_CHARS = 512 * 1024;
private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor();
private static final List<String> TIKA_SUPPORTED_TYPES
@ -65,7 +62,7 @@ class TikaTextExtractor extends FileTextExtractor<Metadata> {
/**
 * Logs a Tika extraction warning to both the dedicated Tika log and the
 * standard extractor warning log (via the base-class implementation).
 *
 * BUGFIX: the duplicate direct logger.log call is removed — it logged the
 * identical message a second time and referenced the class-level logger
 * field that the surrounding changes delete; super.logWarning covers it.
 *
 * @param msg warning message to record
 * @param ex  exception that triggered the warning
 */
@Override
void logWarning(final String msg, Exception ex) {
    KeywordSearch.getTikaLogger().log(Level.WARNING, msg, ex);
    super.logWarning(msg, ex);
}
@Override