Merge branch 'develop' of https://github.com/sleuthkit/autopsy into develop

Nick Davis 2014-04-15 11:50:56 -04:00
commit b3c9b23feb
10 changed files with 104 additions and 44 deletions

View File

@@ -48,6 +48,7 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.TskData;
/**
@@ -60,6 +61,7 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSo
private static final HashMap<Long, Long> fileCountsForIngestJobs = new HashMap<>();
private final boolean skipKnownFiles;
private IngestJobContext context = null;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
SampleDataSourceIngestModule(SampleModuleIngestJobSettings settings) {
this.skipKnownFiles = settings.skipKnownFiles();
@@ -129,7 +131,7 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSo
}
synchronized static void initFileCount(long ingestJobId) {
Long refCount = IngestModuleAdapter.moduleRefCountIncrementAndGet(ingestJobId);
Long refCount = refCounter.incrementAndGet(ingestJobId);
if (refCount == 1) {
fileCountsForIngestJobs.put(ingestJobId, 0L);
}
@@ -142,7 +144,7 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSo
}
synchronized static void postFileCount(long ingestJobId) {
Long refCount = IngestModuleAdapter.moduleRefCountDecrementAndGet(ingestJobId);
Long refCount = refCounter.decrementAndGet(ingestJobId);
if (refCount == 0) {
Long filesCount = fileCountsForIngestJobs.remove(ingestJobId);
String msgText = String.format("Found %d files", filesCount);
@@ -153,4 +155,5 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSo
IngestServices.getInstance().postMessage(message);
}
}
}

View File

@@ -40,6 +40,7 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
@@ -59,6 +60,7 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestMo
private static int attrId = -1;
private final boolean skipKnownFiles;
private IngestJobContext context = null;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
SampleFileIngestModule(SampleModuleIngestJobSettings settings) {
this.skipKnownFiles = settings.skipKnownFiles();
@@ -167,7 +169,7 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestMo
}
synchronized static void initBlackboardPostCount(long ingestJobId) {
Long refCount = IngestModuleAdapter.moduleRefCountIncrementAndGet(ingestJobId);
Long refCount = refCounter.incrementAndGet(ingestJobId);
if (refCount == 1) {
artifactCountsForIngestJobs.put(ingestJobId, 0L);
}
@@ -180,7 +182,7 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestMo
}
synchronized static void reportBlackboardPostCount(long ingestJobId) {
Long refCount = IngestModuleAdapter.moduleRefCountDecrementAndGet(ingestJobId);
Long refCount = refCounter.decrementAndGet(ingestJobId);
if (refCount == 0) {
Long filesCount = artifactCountsForIngestJobs.remove(ingestJobId);
String msgText = String.format("Posted %d times to the blackboard", filesCount);
@@ -190,5 +192,5 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestMo
msgText);
IngestServices.getInstance().postMessage(message);
}
}
}
}

View File

@@ -18,33 +18,11 @@
*/
package org.sleuthkit.autopsy.ingest;
import java.util.HashMap;
/**
* An adapter that provides a default implementation of the IngestModule
* interface.
*/
public abstract class IngestModuleAdapter implements IngestModule {
// Maps a JobId to the count of instances
static HashMap<Long, Long> moduleRefCount = new HashMap<>();
public static synchronized long moduleRefCountIncrementAndGet(long jobId) {
long count = moduleRefCount.containsKey(jobId) ? moduleRefCount.get(jobId) : 0;
long nextCount = count + 1;
moduleRefCount.put(jobId, nextCount);
return nextCount;
}
public static synchronized long moduleRefCountDecrementAndGet(long jobId) {
if (moduleRefCount.containsKey(jobId)) {
long count = moduleRefCount.get(jobId);
moduleRefCount.put(jobId, --count);
return count;
} else {
return 0;
}
}
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
}

View File

@@ -0,0 +1,59 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 - 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.HashMap;
/**
* A utility class that modules can use to keep track of whether they are the
* first/last instance for a particular job.
*
* An instance of this should be static in your module class.
*/
public class IngestModuleReferenceCounter {
// Maps a JobId to the count of instances
private HashMap<Long, Long> moduleRefCount = new HashMap<>();
public synchronized long get(long jobId) {
return moduleRefCount.get(jobId);
}
public synchronized long incrementAndGet(long jobId) {
long count = moduleRefCount.containsKey(jobId) ? moduleRefCount.get(jobId) : 0;
long nextCount = count + 1;
moduleRefCount.put(jobId, nextCount);
return nextCount;
}
public synchronized long decrementAndGet(long jobId) {
if (moduleRefCount.containsKey(jobId)) {
long count = moduleRefCount.get(jobId);
if (--count == 0) {
moduleRefCount.remove(jobId);
} else {
moduleRefCount.put(jobId, count);
}
return count;
} else {
return -1;
}
}
}
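To illustrate the pattern the remaining files in this commit are converted to, here is a minimal, hypothetical module sketch: one static IngestModuleReferenceCounter per module class, incremented in startUp() and decremented in shutDown(), with the == 1 and == 0 results gating one-time per-job setup and the final per-job summary. The ExampleFileIngestModule class below is not part of this commit; its method signatures simply mirror the ones visible in the surrounding diffs.

// Illustrative only, not part of this commit. Everything except the ingest
// framework classes (IngestModuleAdapter, FileIngestModule, etc.) is hypothetical.
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.AbstractFile;

class ExampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule {

    // One counter shared by every instance of this module class.
    private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
    private long jobId;

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        jobId = context.getJobId();
        // Only the first instance created for a given job performs per-job setup.
        if (refCounter.incrementAndGet(jobId) == 1) {
            // per-job initialization goes here
        }
    }

    @Override
    public ProcessResult process(AbstractFile file) {
        // per-file work goes here
        return ProcessResult.OK;
    }

    @Override
    public void shutDown(boolean ingestJobCancelled) {
        // Only the last instance to shut down for a given job posts the summary.
        if (refCounter.decrementAndGet(jobId) == 0) {
            // per-job summary posting / cleanup goes here
        }
    }
}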

View File

@@ -41,6 +41,7 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
@@ -62,14 +63,15 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
private AtomicInteger filesProcessed = new AtomicInteger(0);
private volatile boolean filesToFire = false;
private long jobId;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
ExifParserFileIngestModule() {
}
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId);
refCounter.incrementAndGet(jobId);
}
@@ -198,7 +200,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
@Override
public void shutDown(boolean ingestJobCancelled) {
// We only need to check for this final event on the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
if (refCounter.decrementAndGet(jobId) == 0) {
if (filesToFire) {
//send the final new data event
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));

View File

@@ -33,6 +33,7 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
@@ -53,6 +54,7 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
private long jobId;
private static AtomicLong processTime = new AtomicLong(0);
private static AtomicLong numFiles = new AtomicLong(0);
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
FileExtMismatchIngestModule(FileExtMismatchDetectorModuleSettings settings) {
this.settings = settings;
@@ -61,7 +63,7 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId);
refCounter.incrementAndGet(jobId);
FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault();
SigTypeToExtMap = xmlLoader.load();
}
@@ -153,7 +155,7 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
@Override
public void shutDown(boolean ingestJobCancelled) {
// We only need to post the summary msg from the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
if (refCounter.decrementAndGet(jobId) == 0) {
StringBuilder detailsSb = new StringBuilder();
detailsSb.append("<table border='0' cellpadding='4' width='280'>");
detailsSb.append("<tr><td>").append(FileExtMismatchDetectorModuleFactory.getModuleName()).append("</td></tr>");

View File

@@ -35,6 +35,7 @@ import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskException;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
/**
* Detects the type of a file based on signature (magic) values. Posts results
@@ -48,6 +49,7 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
private long jobId;
private static AtomicLong matchTime = new AtomicLong(0);
private static AtomicLong numFiles = new AtomicLong(0);
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
// The detector. Swap out with a different implementation of FileTypeDetectionInterface as needed.
// If desired in the future to be more knowledgeable about weird files or rare formats, we could
@@ -61,7 +63,7 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId);
refCounter.incrementAndGet(jobId);
}
@Override
@@ -108,7 +110,7 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
@Override
public void shutDown(boolean ingestJobCancelled) {
// We only need to post the summary msg from the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
if (refCounter.decrementAndGet(jobId) == 0) {
StringBuilder detailsSb = new StringBuilder();
detailsSb.append("<table border='0' cellpadding='4' width='280'>");
detailsSb.append("<tr><td>").append(FileTypeIdModuleFactory.getModuleName()).append("</td></tr>");

View File

@@ -60,6 +60,7 @@ import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
/**
* 7Zip ingest module extracts supported archives, adds extracted DerivedFiles,
@@ -91,6 +92,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
private static final int ZIP_SIGNATURE_BE = 0x504B0304;
private IngestJobContext context;
private long jobId;
private final static IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
SevenZipIngestModule() {
}
@@ -124,7 +126,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
}
// if first instance of this module for this job then check 7zip init
if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) {
if (refCounter.incrementAndGet(jobId) == 1) {
try {
SevenZip.initSevenZipFromPlatformJAR();
String platform = SevenZip.getUsedPlatform();
@@ -184,6 +186,12 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
return ProcessResult.OK;
}
@Override
public void shutDown(boolean ingestJobCancelled) {
// We don't need the returned value, but decrement it for cleanliness and consistency
refCounter.decrementAndGet(jobId);
}
private void sendNewFilesEvent(AbstractFile archive, List<AbstractFile> unpackedFiles) {
//currently sending a single event for all new files
services.fireModuleContentEvent(new ModuleContentEvent(archive));

View File

@@ -43,6 +43,7 @@ import org.sleuthkit.datamodel.TskException;
import org.sleuthkit.autopsy.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.HashInfo;
public class HashDbIngestModule extends IngestModuleAdapter implements FileIngestModule {
@@ -56,10 +57,11 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
private List<HashDb> knownBadHashSets = new ArrayList<>();
private List<HashDb> knownHashSets = new ArrayList<>();
private long jobId;
static AtomicLong totalKnownBadCount = new AtomicLong(0);
static AtomicLong totalCalctime = new AtomicLong(0);
static AtomicLong totalLookuptime = new AtomicLong(0);
private static AtomicLong totalKnownBadCount = new AtomicLong(0);
private static AtomicLong totalCalctime = new AtomicLong(0);
private static AtomicLong totalLookuptime = new AtomicLong(0);
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
HashDbIngestModule(HashLookupModuleSettings settings) {
this.settings = settings;
}
@@ -70,7 +72,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
getEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets);
getEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets);
if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) {
if (refCounter.incrementAndGet(jobId) == 1) {
// if first module for this job then post error msgs if needed
if (knownBadHashSets.isEmpty()) {
@@ -303,7 +305,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
@Override
public void shutDown(boolean ingestJobCancelled) {
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
if (refCounter.decrementAndGet(jobId) == 0) {
if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty())) {
StringBuilder detailsSb = new StringBuilder();
//details

View File

@@ -38,6 +38,7 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
@@ -91,7 +92,8 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
private long dataSourceId;
private static AtomicInteger instanceCount = new AtomicInteger(0); //just used for logging
private int instanceNum = 0;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private enum IngestStatus {
TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested
@@ -130,7 +132,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
// increment the module reference count
// if first instance of this module for this job then check the server and existence of keywords
if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) {
if (refCounter.incrementAndGet(jobId) == 1) {
final Server server = KeywordSearch.getServer();
try {
if (!server.isRunning()) {
@@ -256,7 +258,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
SearchRunner.getInstance().endJob(jobId);
// We only need to post the summary msg from the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
if (refCounter.decrementAndGet(jobId) == 0) {
postIndexSummary();
}