mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 18:17:43 +00:00
Merge branch 'develop' of https://github.com/sleuthkit/autopsy into develop
This commit is contained in:
commit
1535b86d15
10
.gitignore
vendored
@@ -3,8 +3,7 @@
|
||||
/*/build/
|
||||
*/nbproject/private/*
|
||||
/nbproject/private/*
|
||||
/Core/release/modules/lib/
|
||||
/Core/release/modules/ext/
|
||||
/Core/release/
|
||||
/Core/src/org/sleuthkit/autopsy/coreutils/Version.properties
|
||||
/Core/build/
|
||||
/Core/dist/
|
||||
@@ -68,13 +67,14 @@ genfiles.properties
|
||||
hs_err_pid*.log
|
||||
Core/src/org/sleuthkit/autopsy/casemodule/docs/QuickStart.html
|
||||
Core/src/org/sleuthkit/autopsy/casemodule/docs/screenshot.png
|
||||
Core/src/org/sleuthkit/autopsy/datamodel/ranges.csv
|
||||
/test/script/myconfig.xml
|
||||
/test/script/*/*.xml
|
||||
.DS_Store
|
||||
.*.swp
|
||||
Core/src/org/sleuthkit/autopsy/datamodel/ranges.csv
|
||||
|
||||
thunderbirdparser/release/modules/ext
|
||||
|
||||
ImageGallery/release/modules/ext/
|
||||
/ImageGallery/release/
|
||||
/thunderbirdparser/release/
|
||||
/Experimental/release/
|
||||
|
||||
|
30
BootstrapIvy.xml
Normal file
@@ -0,0 +1,30 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project name="BootstrapIvy" default="all" basedir="." xmlns:ivy="antlib:org.apache.ivy.ant">
|
||||
<property name="ivy.install.version" value="2.3.0-rc2" />
|
||||
<condition property="ivy.home" value="${env.IVY_HOME}">
|
||||
<isset property="env.IVY_HOME" />
|
||||
</condition>
|
||||
<property name="ivy.home" value="${user.home}/.ant" />
|
||||
<property name="ivy.jar.dir" value="${ivy.home}/lib" />
|
||||
<property name="ivy.jar.file" value="${ivy.jar.dir}/ivy.jar" />
|
||||
|
||||
<target name="download-ivy" unless="offline">
|
||||
<available file="${ivy.jar.file}" property="ivy.available"/>
|
||||
<antcall target="-download-ivy" />
|
||||
</target>
|
||||
|
||||
<target name="-download-ivy" unless="ivy.available">
|
||||
<mkdir dir="${ivy.jar.dir}"/>
|
||||
<get src="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.install.version}/ivy-${ivy.install.version}.jar"
|
||||
dest="${ivy.jar.file}" usetimestamp="true"/>
|
||||
</target>
|
||||
|
||||
<!-- init-ivy will bootstrap Ivy if the user doesn't have it already -->
|
||||
<target name="init-ivy" depends="download-ivy" unless="ivy.lib.path">
|
||||
<path id="ivy.lib.path">
|
||||
<fileset dir="${ivy.jar.dir}" includes="*.jar"/>
|
||||
</path>
|
||||
<taskdef resource="org/apache/ivy/ant/antlib.xml"
|
||||
uri="antlib:org.apache.ivy.ant" classpathref="ivy.lib.path"/>
|
||||
</target>
|
||||
</project>
|
@@ -2,10 +2,43 @@
|
||||
<!-- You may freely edit this file. See harness/README in the NetBeans platform -->
|
||||
<!-- for some information on what you could do (e.g. targets to override). -->
|
||||
<!-- If you delete this file and reopen the project it will be recreated. -->
|
||||
<project name="org.sleuthkit.autopsy.core" default="netbeans" basedir=".">
|
||||
<project name="org.sleuthkit.autopsy.core" default="netbeans" basedir="." xmlns:ivy="antlib:org.apache.ivy.ant" >
|
||||
<description>Builds, tests, and runs the project org.sleuthkit.autopsy.core</description>
|
||||
<import file="nbproject/build-impl.xml"/>
|
||||
|
||||
<import file="../BootstrapIvy.xml"/>
|
||||
|
||||
|
||||
<property name="thirdparty.dir" value="${basedir}/../thirdparty" />
|
||||
<property name="release.dir" value="${basedir}/release" />
|
||||
<property name="modules.dir" value="${release.dir}/modules/" />
|
||||
<property name="ext.dir" value="${modules.dir}/ext" />
|
||||
|
||||
<target name="get-InternalPythonModules" description="get internal python modules">
|
||||
<copy todir="${release.dir}/InternalPythonModules" >
|
||||
<fileset dir="{basedir}/../../InternalPythonModules"/>
|
||||
</copy>
|
||||
</target>
|
||||
|
||||
<target name="get-thirdparty-dependencies" description="get third-party dependencies">
|
||||
<!--Copy openCV dependencies to release-->
|
||||
<copy todir="${modules.dir}" >
|
||||
<fileset dir="${thirdparty.dir}/opencv" />
|
||||
</copy>
|
||||
|
||||
<!--Copy photorec to release-->
|
||||
<copy todir="${release.dir}/photorec_exec" >
|
||||
<fileset dir="${thirdparty.dir}/photorec_exec"/>
|
||||
</copy>
|
||||
|
||||
<!--Copy other jars-->
|
||||
<copy file="${thirdparty.dir}/rejistry/Rejistry-1.0-SNAPSHOT.jar" todir="${ext.dir}" />
|
||||
<copy file="${thirdparty.dir}/sevenzip/sevenzipjbinding.jar" todir="${ext.dir}" />
|
||||
<copy file="${thirdparty.dir}/sevenzip/sevenzipjbinding-AllPlatforms.jar" todir="${ext.dir}" />
|
||||
<copy file="${thirdparty.dir}/stix/StixLib.jar" todir="${ext.dir}" />
|
||||
<copy file="${thirdparty.dir}/jdom/jdom-2.0.5.jar" todir="${ext.dir}" />
|
||||
<copy file="${thirdparty.dir}/jdom/jdom-2.0.5-contrib.jar" todir="${ext.dir}" />
|
||||
</target>
|
||||
|
||||
<!-- Verify that the TSK_HOME env variable is set -->
|
||||
<target name="findTSK">
|
||||
<property environment="env"/>
|
||||
@@ -18,25 +51,51 @@
|
||||
|
||||
<target name="getTSKJars" depends="findTSK">
|
||||
<property environment="env"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/dist/Tsk_DataModel_PostgreSQL.jar" tofile="${basedir}/release/modules/ext/Tsk_DataModel_PostgreSQL.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/sqlite-jdbc-3.8.11.jar" tofile="${basedir}/release/modules/ext/sqlite-jdbc-3.8.11.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/postgresql-9.4.1211.jre7.jar" tofile="${basedir}/release/modules/ext/postgresql-9.4.1211.jre7.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/mchange-commons-java-0.2.9.jar" tofile="${basedir}/release/modules/ext/mchange-commons-java-0.2.9.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/c3p0-0.9.5.jar" tofile="${basedir}/release/modules/ext/c3p0-0.9.5.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/sqlite-jdbc-3.8.11.jar" tofile="${basedir}/release/modules/ext/sqlite-jdbc-3.8.11.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/dist/Tsk_DataModel_PostgreSQL.jar"
|
||||
tofile="${basedir}/release/modules/ext/Tsk_DataModel_PostgreSQL.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/sqlite-jdbc-3.8.11.jar"
|
||||
tofile="${basedir}/release/modules/ext/sqlite-jdbc-3.8.11.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/postgresql-9.4.1211.jre7.jar"
|
||||
tofile="${basedir}/release/modules/ext/postgresql-9.4.1211.jre7.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/mchange-commons-java-0.2.9.jar"
|
||||
tofile="${basedir}/release/modules/ext/mchange-commons-java-0.2.9.jar"/>
|
||||
<copy file="${env.TSK_HOME}/bindings/java/lib/c3p0-0.9.5.jar"
|
||||
tofile="${basedir}/release/modules/ext/c3p0-0.9.5.jar"/>
|
||||
</target>
|
||||
|
||||
<target name="download-binlist">
|
||||
|
||||
|
||||
<target name="download-binlist" description="Download the din list data file used to enrich credit card numbers">
|
||||
<get src="https://raw.githubusercontent.com/binlist/data/master/ranges.csv"
|
||||
dest="src\org\sleuthkit\autopsy\datamodel"
|
||||
ignoreerrors="true"
|
||||
verbose="true"/>
|
||||
</target>
|
||||
|
||||
<target name="init" depends="basic-init,files-init,build-init,-javac-init">
|
||||
<antcall target="download-binlist" />
|
||||
|
||||
<target name="init" depends="basic-init,files-init,build-init,-javac-init,init-ivy">
|
||||
<mkdir dir="${ext.dir}"/>
|
||||
<copy file="${thirdparty.dir}/LICENSE-2.0.txt" todir="${ext.dir}" />
|
||||
|
||||
<!-- fetch all the dependencies from Ivy and stick them in the right places -->
|
||||
<ivy:resolve/>
|
||||
<ivy:retrieve conf="core" sync="true" pattern="release/modules/ext/[artifact]-[revision](-[classifier]).[ext]" />
|
||||
|
||||
<!-- get additional deps -->
|
||||
<antcall target="getTSKJars" />
|
||||
<antcall target="get-thirdparty-dependencies" />
|
||||
<antcall target="get-InternalPythonModules"/>
|
||||
|
||||
<!--Download the BIN list used for credit card number enrichment-->
|
||||
<antcall target="download-binlist" />
|
||||
</target>
|
||||
|
||||
<target name="clean" depends="projectized-common.clean">
|
||||
<!--Override clean to delete jars, etc downloaded with Ivy
|
||||
or copied in from thirdparty folder. This way we don't end up with
|
||||
out-of-date/unneeded stuff in the installer-->
|
||||
<mkdir dir="${release.dir}"/>
|
||||
<delete includeemptydirs="true" >
|
||||
<fileset dir="${release.dir}" includes="**/*"/>
|
||||
</delete>
|
||||
</target>
|
||||
</project>
|
||||
|
25
Core/ivy.xml
Normal file
@@ -0,0 +1,25 @@
|
||||
<ivy-module version="2.0">
|
||||
<info organisation="org.sleuthkit.autopsy" module="emailparser"/>
|
||||
<configurations >
|
||||
<!-- module dependencies -->
|
||||
<conf name="core"/>
|
||||
|
||||
</configurations>
|
||||
<dependencies >
|
||||
<dependency conf="core->default" org="org.apache.activemq" name="activemq-all" rev="5.11.1"/>
|
||||
|
||||
|
||||
<dependency conf="core->default" org="org.apache.curator" name="curator-client" rev="2.8.0"/>
|
||||
<dependency conf="core->default" org="org.apache.curator" name="curator-framework" rev="2.8.0"/>
|
||||
<dependency conf="core->default" org="org.apache.curator" name="curator-recipes" rev="2.8.0"/>
|
||||
|
||||
<dependency conf="core->default" org="org.python" name="jython-standalone" rev="2.7.0" />
|
||||
|
||||
<dependency conf="core->default" org="com.drewnoakes" name="metadata-extractor" rev="2.8.1"/>
|
||||
|
||||
<dependency conf="core->default" org="org.apache.tika" name="tika-core" rev="1.5"/>
|
||||
<dependency conf="core->default" org="com.adobe.xmp" name="xmpcore" rev="5.1.2"/>
|
||||
<dependency conf="core->default" org="org.apache.zookeeper" name="zookeeper" rev="3.4.6"/>
|
||||
|
||||
</dependencies>
|
||||
</ivy-module>
|
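For orientation, the "core" configuration above is the one retrieved by Core/build.xml with the pattern release/modules/ext/[artifact]-[revision](-[classifier]).[ext]; assuming the coordinates listed here, the resolved jars land in the module under names roughly like the following (illustrative only, and Ivy pulls transitive dependencies as well):
release/modules/ext/activemq-all-5.11.1.jar
release/modules/ext/jython-standalone-2.7.0.jar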
9
Core/ivysettings.xml
Normal file
@@ -0,0 +1,9 @@
|
||||
<ivysettings>
|
||||
<settings defaultResolver="main"/>
|
||||
<resolvers>
|
||||
<chain name="main">
|
||||
<ibiblio name="central" m2compatible="true"/>
|
||||
<ibiblio name="maven.restlet.org" root="http://maven.restlet.com" m2compatible="true" />
|
||||
</chain>
|
||||
</resolvers>
|
||||
</ivysettings>
|
Binary file not shown.
@@ -1,18 +0,0 @@
|
||||
Copyright 2002-2012 Drew Noakes
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
More information about this project is available at:
|
||||
|
||||
http://drewnoakes.com/code/exif/
|
||||
http://code.google.com/p/metadata-extractor/
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -501,8 +501,6 @@ final class LocalDiskPanel extends JPanel {
|
||||
*/
|
||||
private class LocalDiskModel implements TableModel {
|
||||
|
||||
private List<LocalDisk> physicalDrives = new ArrayList<>();
|
||||
private List<LocalDisk> partitions = new ArrayList<>();
|
||||
private LocalDiskThread worker = null;
|
||||
private boolean ready = false;
|
||||
private volatile boolean loadingDisks = false;
|
||||
@@ -609,6 +607,8 @@ final class LocalDiskPanel extends JPanel {
|
||||
class LocalDiskThread extends SwingWorker<Object, Void> {
|
||||
|
||||
private final Logger logger = Logger.getLogger(LocalDiskThread.class.getName());
|
||||
private List<LocalDisk> physicalDrives = new ArrayList<>();
|
||||
private List<LocalDisk> partitions = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
protected Object doInBackground() throws Exception {
|
||||
@@ -674,10 +674,6 @@ final class LocalDiskPanel extends JPanel {
|
||||
pathTextField.setText("");
|
||||
fireUpdateEvent();
|
||||
ready = true;
|
||||
} else {
|
||||
//this should not be possible but happens
|
||||
//possible threading error when this method is called while running
|
||||
logger.log(Level.INFO, "Loading local disks was canceled"); //NON-NLS
|
||||
}
|
||||
}
|
||||
diskTable.revalidate();
|
||||
|
@@ -89,16 +89,18 @@ public class ArtifactStringContent implements StringContent {
|
||||
* table.
|
||||
*/
|
||||
buffer.append("<table border='1'>"); //NON-NLS
|
||||
|
||||
// header row
|
||||
buffer.append("<tr>"); //NON-NLS
|
||||
buffer.append("<td><b>"); //NON-NLS
|
||||
buffer.append("<th><b>"); //NON-NLS
|
||||
buffer.append(Bundle.ArtifactStringContent_attrsTableHeader_type());
|
||||
buffer.append("</b></td>"); //NON-NLS
|
||||
buffer.append("<td><b>"); //NON-NLS
|
||||
buffer.append("</b></th>"); //NON-NLS
|
||||
buffer.append("<th><b>"); //NON-NLS
|
||||
buffer.append(Bundle.ArtifactStringContent_attrsTableHeader_value());
|
||||
buffer.append("</b></td>"); //NON-NLS
|
||||
buffer.append("<td><b>"); //NON-NLS
|
||||
buffer.append("</b></th>"); //NON-NLS
|
||||
buffer.append("<th><b>"); //NON-NLS
|
||||
buffer.append(Bundle.ArtifactStringContent_attrsTableHeader_sources());
|
||||
buffer.append("</b></td>"); //NON-NLS
|
||||
buffer.append("</b></th>"); //NON-NLS
|
||||
buffer.append("</tr>\n"); //NON-NLS
|
||||
try {
|
||||
Content content = artifact.getSleuthkitCase().getContentById(artifact.getObjectID());
|
||||
@@ -108,64 +110,42 @@ public class ArtifactStringContent implements StringContent {
|
||||
*/
|
||||
for (BlackboardAttribute attr : artifact.getAttributes()) {
|
||||
|
||||
/*
|
||||
* Attribute display name column.
|
||||
*/
|
||||
buffer.append("<tr><td>"); //NON-NLS
|
||||
buffer.append(attr.getAttributeType().getDisplayName());
|
||||
buffer.append("</td>"); //NON-NLS
|
||||
|
||||
/*
|
||||
* Attribute value column.
|
||||
*/
|
||||
buffer.append("<td>"); //NON-NLS
|
||||
String value = "";
|
||||
switch (attr.getAttributeType().getValueType()) {
|
||||
case STRING:
|
||||
String str = attr.getValueString();
|
||||
str = str.replaceAll(" ", " "); //NON-NLS
|
||||
str = str.replaceAll("<", "<"); //NON-NLS
|
||||
str = str.replaceAll(">", ">"); //NON-NLS
|
||||
str = str.replaceAll("(\r\n|\n)", "<br />"); //NON-NLS
|
||||
buffer.append(str);
|
||||
break;
|
||||
case INTEGER:
|
||||
case LONG:
|
||||
case DOUBLE:
|
||||
buffer.append(attr.getDisplayString());
|
||||
break;
|
||||
case BYTE:
|
||||
buffer.append(Arrays.toString(attr.getValueBytes()));
|
||||
default:
|
||||
value = attr.getDisplayString();
|
||||
break;
|
||||
|
||||
// Use Autopsy date formatting settings, not TSK defaults
|
||||
case DATETIME:
|
||||
long epoch = attr.getValueLong();
|
||||
String time = "0000-00-00 00:00:00";
|
||||
value = "0000-00-00 00:00:00";
|
||||
if (null != content && 0 != epoch) {
|
||||
dateFormatter.setTimeZone(ContentUtils.getTimeZone(content));
|
||||
time = dateFormatter.format(new java.util.Date(epoch * 1000));
|
||||
value = dateFormatter.format(new java.util.Date(epoch * 1000));
|
||||
}
|
||||
buffer.append(time);
|
||||
break;
|
||||
}
|
||||
buffer.append("</td>"); //NON-NLS
|
||||
|
||||
/*
|
||||
* Attribute sources column.
|
||||
*/
|
||||
buffer.append("<td>"); //NON-NLS
|
||||
buffer.append(StringUtils.join(attr.getSources(), ", "));
|
||||
buffer.append("</td>"); //NON-NLS
|
||||
|
||||
buffer.append("</tr>\n"); //NON-NLS
|
||||
String sources = StringUtils.join(attr.getSources(), ", ");
|
||||
buffer.append(makeTableRow(attr.getAttributeType().getDisplayName(), value, sources));
|
||||
}
|
||||
|
||||
/*
|
||||
* Add a row for the source content path.
|
||||
*/
|
||||
buffer.append("<tr>"); //NON-NLS
|
||||
buffer.append("<td>"); //NON-NLS
|
||||
buffer.append(NbBundle.getMessage(this.getClass(), "ArtifactStringContent.getStr.srcFilePath.text"));
|
||||
buffer.append("</td>"); //NON-NLS
|
||||
buffer.append("<td>"); //NON-NLS
|
||||
|
||||
String path = "";
|
||||
try {
|
||||
if (null != content) {
|
||||
@@ -175,33 +155,24 @@ public class ArtifactStringContent implements StringContent {
|
||||
logger.log(Level.SEVERE, String.format("Error getting source content path for artifact (artifact_id=%d, obj_id=%d)", artifact.getArtifactID(), artifact.getObjectID()), ex);
|
||||
path = Bundle.ArtifactStringContent_failedToGetSourcePath_message();
|
||||
}
|
||||
buffer.append(path);
|
||||
buffer.append("</td>"); //NON-NLS
|
||||
buffer.append("</tr>\n"); //NON-NLS
|
||||
|
||||
buffer.append(makeTableRow(NbBundle.getMessage(this.getClass(), "ArtifactStringContent.getStr.srcFilePath.text"),
|
||||
path, ""));
|
||||
|
||||
|
||||
/*
|
||||
* Add a row for the artifact id.
|
||||
*/
|
||||
buffer.append("<tr><td>"); //NON-NLS
|
||||
buffer.append(NbBundle.getMessage(this.getClass(), "ArtifactStringContent.getStr.artifactId.text"));
|
||||
buffer.append("</td><td>"); //NON-NLS
|
||||
buffer.append(artifact.getArtifactID());
|
||||
buffer.append("</td>"); //NON-NLS
|
||||
buffer.append("</tr>\n"); //NON-NLS
|
||||
|
||||
/*
|
||||
* Finish the document
|
||||
*/
|
||||
buffer.append("</table>"); //NON-NLS
|
||||
buffer.append("</html>\n"); //NON-NLS
|
||||
stringContent = buffer.toString();
|
||||
*/
|
||||
buffer.append(makeTableRow(NbBundle.getMessage(this.getClass(), "ArtifactStringContent.getStr.artifactId.text"),
|
||||
Long.toString(artifact.getArtifactID()), ""));
|
||||
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, String.format("Error getting data for artifact (artifact_id=%d)", artifact.getArtifactID()), ex);
|
||||
buffer.append("<tr><td>"); //NON-NLS
|
||||
buffer.append(Bundle.ArtifactStringContent_failedToGetAttributes_message());
|
||||
buffer.append("</td>"); //NON-NLS
|
||||
buffer.append("</tr>\n"); //NON-NLS
|
||||
buffer.append(makeTableRow(Bundle.ArtifactStringContent_failedToGetAttributes_message(), "", ""));
|
||||
} finally {
|
||||
/*
|
||||
* Finish the document
|
||||
*/
|
||||
buffer.append("</table>"); //NON-NLS
|
||||
buffer.append("</html>\n"); //NON-NLS
|
||||
stringContent = buffer.toString();
|
||||
@@ -210,5 +181,26 @@ public class ArtifactStringContent implements StringContent {
|
||||
|
||||
return stringContent;
|
||||
}
|
||||
|
||||
// escape special HTML characters
|
||||
private String escapeHtmlString(String str) {
|
||||
str = str.replaceAll(" ", " "); //NON-NLS
|
||||
str = str.replaceAll("<", "<"); //NON-NLS
|
||||
str = str.replaceAll(">", ">"); //NON-NLS
|
||||
str = str.replaceAll("(\r\n|\n)", "<br />"); //NON-NLS
|
||||
return str;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a row in the result table
|
||||
* @param type String for column1 (Type of attribute))
|
||||
* @param value String for column2 (value of attribute)
|
||||
* @param source Column 3 (attribute source)
|
||||
* @return HTML formatted string of these values
|
||||
*/
|
||||
private String makeTableRow(String type, String value, String source) {
|
||||
String row = "<tr><td>" + escapeHtmlString(type) + "</td><td>" + escapeHtmlString(value) + "</td><td>" + escapeHtmlString(source) + "</td></tr>";
|
||||
return row;
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -49,22 +49,22 @@
|
||||
<dependency conf="autopsy_core->*" org="com.github.spullara.mustache.java" name="compiler" rev="0.9.1" />
|
||||
|
||||
<!-- image support for autopsy and image gallery -->
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.common" name="common-lang" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.common" name="common-io" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.common" name="common-image" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-jpeg" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-bmp" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-tiff" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-pnm" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-psd" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-iff" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-pcx" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-pict" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-sgi" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-tga" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-icns" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-thumbsdb" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-core" rev="3.2" />
|
||||
<dependency conf="autopsy_core->*" org="com.twelvemonkeys.imageio" name="imageio-metadata" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.common" name="common-lang" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.common" name="common-io" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.common" name="common-image" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-jpeg" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-bmp" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-tiff" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-pnm" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-psd" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-iff" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-pcx" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-pict" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-sgi" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-tga" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-icns" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-thumbsdb" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-core" rev="3.2" />
|
||||
<dependency conf="autopsy_core->default" org="com.twelvemonkeys.imageio" name="imageio-metadata" rev="3.2" />
|
||||
</dependencies>
|
||||
</ivy-module>
|
||||
|
@@ -2,7 +2,24 @@
|
||||
<!-- You may freely edit this file. See harness/README in the NetBeans platform -->
|
||||
<!-- for some information on what you could do (e.g. targets to override). -->
|
||||
<!-- If you delete this file and reopen the project it will be recreated. -->
|
||||
<project name="org.sleuthkit.autopsy.experimental" default="netbeans" basedir=".">
|
||||
<project name="org.sleuthkit.autopsy.experimental" default="netbeans" basedir="." xmlns:ivy="antlib:org.apache.ivy.ant">
|
||||
<description>Builds, tests, and runs the project org.sleuthkit.autopsy.experimental.</description>
|
||||
<import file="nbproject/build-impl.xml"/>
|
||||
<import file="../BootstrapIvy.xml"/>
|
||||
<property name="release.dir" value="${basedir}/release" />
|
||||
<target name="init" depends="basic-init,files-init,build-init,-javac-init,init-ivy">
|
||||
<ivy:settings file="ivysettings.xml" />
|
||||
<ivy:resolve/>
|
||||
<ivy:retrieve sync="false" pattern="${release.dir}/modules/ext/[artifact]-[revision](-[classifier]).[ext]" />
|
||||
</target>
|
||||
|
||||
<target name="clean" depends="projectized-common.clean">
|
||||
<!--Override clean to delete jars, etc downloaded with Ivy
|
||||
or copied in from thirdparty folder. This way we don't end up with
|
||||
out-of-date/unneeded stuff in the installer-->
|
||||
<mkdir dir="${release.dir}"/>
|
||||
<delete includeemptydirs="true">
|
||||
<fileset dir="${release.dir}" includes="**/*"/>
|
||||
</delete>
|
||||
</target>
|
||||
</project>
|
||||
|
22
Experimental/ivy.xml
Normal file
@@ -0,0 +1,22 @@
|
||||
<ivy-module version="2.0">
|
||||
<info organisation="org.sleuthkit.autopsy" module="corelibs"/>
|
||||
<configurations >
|
||||
<!-- module dependencies -->
|
||||
<conf name="experimental"/>
|
||||
|
||||
</configurations>
|
||||
<dependencies>
|
||||
<dependency conf="experimental->default" org="com.github.lgooddatepicker" name="LGoodDatePicker" rev="4.3.1"/>
|
||||
<dependency conf="experimental->default" org="org.apache.solr" name="solr-solrj" rev="4.9.1"/>
|
||||
<dependency conf="experimental->default" org="org.apache.tika" name="tika-core" rev="1.5"/>
|
||||
<dependency conf="experimental->default" org="org.postgresql" name="postgresql" rev="9.4-1201-jdbc41"/>
|
||||
<dependency conf="experimental->default" org="com.mchange" name="c3p0" rev="0.9.5"/>
|
||||
<dependency conf="experimental->default" org="com.fasterxml.jackson.core" name="jackson-core" rev="2.7.0"/>
|
||||
<dependency conf="experimental->default" org="org.swinglabs.swingx" name="swingx-all" rev="1.6.4"/>
|
||||
<exclude artifact="commons-io"/>
|
||||
<exclude artifact="slf4j-api"/>
|
||||
<exclude artifact="wstx-asl"/>
|
||||
<exclude artifact="zookeeper"/>
|
||||
|
||||
</dependencies>
|
||||
</ivy-module>
|
10
Experimental/ivysettings.xml
Normal file
@@ -0,0 +1,10 @@
|
||||
<ivysettings>
|
||||
<settings defaultResolver="main"/>
|
||||
<resolvers>
|
||||
<chain name="main">
|
||||
<ibiblio name="central" m2compatible="true"/>
|
||||
<ibiblio name="ibiblio" m2compatible="true"/>
|
||||
<ibiblio name="xerial" m2compatible="true" root="http://www.xerial.org/maven/repository/snapshot" />
|
||||
</chain>
|
||||
</resolvers>
|
||||
</ivysettings>
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
0
Core/release/InternalPythonModules/README.txt → InternalPythonModules/README.txt
Executable file → Normal file
@@ -5,34 +5,7 @@
|
||||
<project name="org.sleuthkit.autopsy.keywordsearch" default="netbeans" basedir="." xmlns:ivy="antlib:org.apache.ivy.ant">
|
||||
<description>Builds, tests, and runs the project org.sleuthkit.autopsy.keywordsearch.</description>
|
||||
<import file="nbproject/build-impl.xml"/>
|
||||
|
||||
<property name="ivy.install.version" value="2.3.0-rc2" />
|
||||
<condition property="ivy.home" value="${env.IVY_HOME}">
|
||||
<isset property="env.IVY_HOME" />
|
||||
</condition>
|
||||
<property name="ivy.home" value="${user.home}/.ant" />
|
||||
<property name="ivy.jar.dir" value="${ivy.home}/lib" />
|
||||
<property name="ivy.jar.file" value="${ivy.jar.dir}/ivy.jar" />
|
||||
|
||||
<target name="download-ivy" unless="offline">
|
||||
<available file="${ivy.jar.file}" property="ivy.available"/>
|
||||
<antcall target="-download-ivy" />
|
||||
</target>
|
||||
|
||||
<target name="-download-ivy" unless="ivy.available">
|
||||
<mkdir dir="${ivy.jar.dir}"/>
|
||||
<get src="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.install.version}/ivy-${ivy.install.version}.jar"
|
||||
dest="${ivy.jar.file}" usetimestamp="true"/>
|
||||
</target>
|
||||
|
||||
<!-- init-ivy will bootstrap Ivy if the user doesn't have it already -->
|
||||
<target name="init-ivy" depends="download-ivy" unless="ivy.lib.path">
|
||||
<path id="ivy.lib.path">
|
||||
<fileset dir="${ivy.jar.dir}" includes="*.jar"/>
|
||||
</path>
|
||||
<taskdef resource="org/apache/ivy/ant/antlib.xml"
|
||||
uri="antlib:org.apache.ivy.ant" classpathref="ivy.lib.path"/>
|
||||
</target>
|
||||
<import file="../BootstrapIvy.xml"/>
|
||||
|
||||
<property name="release.dir" value="release" />
|
||||
|
||||
|
@@ -125,6 +125,7 @@ class GlobalEditListPanel extends javax.swing.JPanel implements ListSelectionLis
|
||||
}
|
||||
}
|
||||
|
||||
@NbBundle.Messages("GlobalEditListPanel.editKeyword.title=Edit Keyword")
|
||||
/**
|
||||
* Adds keywords to a keyword list, returns true if at least one keyword was successfully added and no
|
||||
* duplicates were found.
|
||||
@@ -139,6 +140,9 @@ class GlobalEditListPanel extends javax.swing.JPanel implements ListSelectionLis
|
||||
int dupeCount = 0;
|
||||
int badCount = 1; // Default to 1 so we enter the loop the first time
|
||||
|
||||
if (!existingKeywords.isEmpty()){ //if there is an existing keyword then this action was called by the edit button
|
||||
dialog.setTitle(NbBundle.getMessage(GlobalEditListPanel.class, "GlobalEditListPanel.editKeyword.title"));
|
||||
}
|
||||
while (badCount > 0) {
|
||||
dialog.setInitialKeywordList(keywordsToRedisplay, isLiteral, isWholeWord);
|
||||
dialog.display();
|
||||
|
19
build.xml
@@ -221,12 +221,19 @@
|
||||
|
||||
|
||||
<target name="doxygen" description="build doxygen docs, requires doxygen in PATH" depends="-init,-hide-excluded-modules">
|
||||
<exec executable="doxygen" dir="${basedir}/docs/doxygen">
|
||||
<arg value="Doxyfile"/>
|
||||
</exec>
|
||||
<exec executable="doxygen" dir="${basedir}/docs/doxygen-user">
|
||||
<arg value="Doxyfile"/>
|
||||
</exec>
|
||||
<property environment="env" />
|
||||
<if>
|
||||
<available file="doxygen"
|
||||
filepath="${env.PATH}"/>
|
||||
<then>
|
||||
<exec executable="doxygen" dir="${basedir}/docs/doxygen">
|
||||
<arg value="Doxyfile"/>
|
||||
</exec>
|
||||
<exec executable="doxygen" dir="${basedir}/docs/doxygen-user">
|
||||
<arg value="Doxyfile"/>
|
||||
</exec>
|
||||
</then>
|
||||
</if>
|
||||
</target>
|
||||
|
||||
<target name="check-release">
|
||||
|
100
docs/doxygen-user/adHocKeywordSearch.dox
Normal file
@@ -0,0 +1,100 @@
|
||||
/*! \page ad_hoc_keyword_search_page Ad Hoc Keyword Search
|
||||
|
||||
|
||||
\section ad_hoc_kw_overview Overview
|
||||
|
||||
The ad hoc keyword search feature allows you to run single keyword terms or lists of keywords against all images in a case. Both options are located in the top right of the main Autopsy window.
|
||||
|
||||
\image html keyword-search-ad-hoc.PNG
|
||||
|
||||
The \ref keyword_search_page must be selected during ingest before doing an ad hoc keyword search. If you don't want to search for any of the existing keyword lists, you can deselect everything to just index the files for later searching.
|
||||
|
||||
\section ad_hoc_kw_types_section Creating Keywords
|
||||
|
||||
The following sections will give a description of each keyword type, then will show some sample text and how various search terms would work against it.
|
||||
|
||||
## Exact match
|
||||
|
||||
Exact match should be used in cases where the search term is expected to always be surrounded by non-word characters (typically whitespace or punctuation). Spaces/punctuation are allowed in the search term, and capitalization is ignored.
|
||||
|
||||
> The quick reddish-brown fox jumps over the lazy dog.
|
||||
|
||||
- "quick", "brown", "dog" will match
|
||||
- "FOX", "Fox", "fox" will all match
|
||||
- "reddish-brown fox", "brown fox", "LAZY DOG" will match
|
||||
- "rown" and "lazy do" will not match since they are not bounded by non-word characters in the text
|
||||
|
||||
## Substring match
|
||||
|
||||
Substring match should be used where the search term is just part of a word, or to allow for different word endings. Capitalization is ignored but spaces and other punctuation can not appear in the search term.
|
||||
|
||||
> The quick reddish-brown fox jumps over the lazy dog.
|
||||
|
||||
- "jump" will match "jumps", and would also match "jumping", "jumped", etc.
|
||||
- "dog" will match
|
||||
- "UMP", "oX" will match
|
||||
- "y dog", "ish-brown" will not match
|
||||
|
||||
## Regex match
|
||||
|
||||
Regex match can be used to search for a specific pattern. Regular expressions are supported using Lucene Regex Syntax which is documented here: https://lucene.apache.org/core/6_4_0/core/org/apache/lucene/util/automaton/RegExp.html. .* is automatically added to the beginning and end of the regular expressions to ensure all matches are found. Additionally, the resulting hits are split on common token separator boundaries (e.g. space, newline, colon, exclamation point etc.) to make the resulting keyword hit more amenable to highlighting.
|
||||
|
||||
There is some validation on the regex but it's best to test on a sample image to make sure your regexes are correct and working as expected.
|
||||
|
||||
> In the year 1885 in an article titled Current Notes, the quick brown fox first jumped over the lazy dog.
|
||||
|
||||
- "qu.ck", "Cu.*es" will match
|
||||
- "[Ff][Oo][Xx]" will match any version of "fox". There is no way to specify that an entire regex should be case-insensitive.
|
||||
- "[0-9]{4}" will match 1885. Character classes like "\d" are not supported. Backreferences are also not supported (but will not generate an error), so "Cu(.)\1ent" would not work to find "Current"
|
||||
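As one more illustrative example (a hypothetical keyword, not one of the built-in lists), a regex such as "[0-9]{3}-[0-9]{3}-[0-9]{4}" could be used to look for US-style phone numbers written like 555-123-4567; because .* is implied at both ends, surrounding text does not prevent a hit, and no "\d"-style character classes are needed.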
|
||||
## Other notes
|
||||
|
||||
### Built-in keywords
|
||||
|
||||
The \ref keyword_search_page has several built-in searches that can not be edited. The ones that are most prone to false hits (IP Address and Phone Number) require that the matching text is surrounded by boundary characters, such as spaces or certain punctuation. For example:
|
||||
- "Address 10.1.5.127 is unavailable" - The built-in IP Address search would find "10.1.5.127" because it is surrounded by whitespace
|
||||
- "abc10.1.7.99xyz" - The built-in IP Address search would not find it because it is surrounded by letters
|
||||
|
||||
If you want to override this default behavior:
|
||||
- Copy the existing regex. The easiest way to do this is to click on Keyword Lists, then the list you want, then the specific entry, and press Ctrl+C to copy it. The copied text will need a bit of cleanup afterward.
|
||||
- Remove the boundary characters on the beginning and end of the regex
|
||||
- Make a new keyword list containing the result and run it either during ingest or through the Keyword Lists button.
|
||||
|
||||
### Non-Latin text
|
||||
In general all three types of keyword searches will work as expected but the feature has not been thoroughly tested with all character sets. As with regex above, we suggest testing on a sample file. Some notes:
|
||||
- Exact match and substring match may no longer be case-insensitive
|
||||
- In languages like Japanese that don't contain word breaks, every character is processed as a separate word. This tends to make substring match fail, but those searches can be run using exact match. For example, if the text contained 日本語, an exact match search on 日本 would find it (a substring search on 日本 would fail).
|
||||
|
||||
\section ad_hoc_kw_search Keyword Search
|
||||
|
||||
Individual keyword or regular expressions can quickly be searched using the search text box widget. You can select "Exact Match", "Substring Match" and "Regular Expression" match. See the earlier \ref ad_hoc_kw_types_section section for information on each keyword type.
|
||||
|
||||
\image html keyword-search-bar.PNG
|
||||
|
||||
Results will be opened in a separate Results Viewer for every search executed and they will also be saved in the Directory Tree as shown in the screenshot below.
|
||||
|
||||
\image html keyword-search-hits.PNG
|
||||
|
||||
\section ad_hoc_kw_lists Keyword Lists
|
||||
|
||||
In addition to being selected during ingest, keyword lists can also be run through the Keyword Lists button. For information on setting up these keyword lists, see the \ref keywordListsTab section of the ingest module documentation.
|
||||
|
||||
Lists created using the Keyword Search Configuration Dialog can be manually searched by the user by pressing on the 'Keyword Lists' button, selecting the check boxes corresponding to the lists to be searched, and pressing the 'Search' button.
|
||||
|
||||
\image html keyword-search-list.PNG
|
||||
|
||||
The results of the keyword list search are shown in the tree, as shown below.
|
||||
|
||||
\image html keyword-search-list-results.PNG
|
||||
|
||||
\section ad_hoc_during_ingest Doing ad hoc searches during ingest
|
||||
|
||||
Ad hoc searches are intended to be used after ingest completes, but can be used in a limited capacity while ingest is ongoing.
|
||||
|
||||
Manual \ref ad_hoc_kw_search for individual keywords or regular expressions can be executed while ingest is ongoing, using the current index. Note however, that you may miss some results if the entire index has not yet been populated. Autopsy enables you to perform the search on an incomplete index in order to retrieve some preliminary results in real-time.
|
||||
|
||||
During the ingest, the normal manual search using \ref ad_hoc_kw_lists behaves differently than after ingest is complete. A selected list can instead be added to the ingest process and it will be searched in the background instead.
|
||||
|
||||
Most keyword management features are disabled during ingest. You cannot edit keyword lists, but you can create new lists (without adding to them) and copy or export existing lists.
|
||||
|
||||
*/
|
BIN
docs/doxygen-user/images/keyword-search-ad-hoc.PNG
Normal file
Binary file not shown.
After Width: | Height: | Size: 85 KiB |
BIN
docs/doxygen-user/images/keyword-search-inbox.PNG
Normal file
Binary file not shown.
After Width: | Height: | Size: 14 KiB |
Binary file not shown.
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 15 KiB |
BIN
docs/doxygen-user/images/keyword_results.PNG
Normal file
Binary file not shown.
After Width: | Height: | Size: 100 KiB |
@@ -1,70 +1,61 @@
|
||||
/*! \page keyword_search_page Keyword Search Module
|
||||
|
||||
What Does It Do
|
||||
========
|
||||
\section keyword_module_overview What Does It Do
|
||||
|
||||
The Keyword Search module facilitates both the \ref ingest_page "ingest" portion of searching and also supports manual text searching after ingest has completed. It extracts text from the files being ingested and adds them to a Solr index that can then be searched.
|
||||
The Keyword Search module facilitates both the \ref ingest_page "ingest" portion of searching and also supports manual text searching after ingest has completed (see \ref ad_hoc_keyword_search_page). It extracts text from the files being ingested and adds them to a Solr index that can then be searched.
|
||||
|
||||
Autopsy tries its best to extract the maximum amount of text from the files being indexed. First, the indexing will try to extract text from supported file formats, such as pure text file format, MS Office Documents, PDF files, Email, and many others. If the file is not supported by the standard text extractor, Autopsy will fall back to a string extraction algorithm. String extraction on unknown file formats or arbitrary binary files can often extract a sizeable amount of text from a file, often enough to provide additional clues to reviewers. String extraction will not extract text strings from encrypted files.
|
||||
|
||||
Configuration
|
||||
=======
|
||||
|
||||
Autopsy ships with some built-in lists that define regular expressions and enable the user to search for Phone Numbers, IP addresses, URLs and E-mail addresses. However, enabling some of these very general lists can produce a very large number of hits, and many of them can be false-positives. Regular expressions involving backtracking can potentially take a long time to complete.
|
||||
Autopsy ships with some built-in lists that define regular expressions and enable the user to search for Phone Numbers, IP addresses, URLs and E-mail addresses. However, enabling some of these very general lists can produce a very large number of hits, and many of them can be false-positives. Regular expressions can potentially take a long time to complete.
|
||||
|
||||
Once files are placed in the Solr index, they can be searched quickly for specific keywords, regular expressions, or keyword search lists that can contain a mixture of keywords and regular expressions. Search queries can be executed automatically during the ingest run or at the end of the ingest, depending on the current settings and the time it takes to ingest the image.
|
||||
|
||||
\section keyword_search_configuration_dialog Keyword Search Configuration Dialog
|
||||
|
||||
The keyword search configuration dialog has three tabs, each with its own purpose:
|
||||
\li The Lists tab is used to add, remove, and modify keyword search lists.
|
||||
\li The String Extraction tab is used to enable language scripts and extraction type.
|
||||
\li The General tab is used to configure the ingest timings and display information.
|
||||
\li The \ref keywordListsTab is used to add, remove, and modify keyword search lists.
|
||||
\li The \ref stringExtractionTab is used to enable language scripts and extraction type.
|
||||
\li The \ref generalSettingsTab is used to configure the ingest timings and display information.
|
||||
|
||||
To create a list, select the 'New List' button and choose a name for the new Keyword List. Once the list has been created, keywords can be added to it. Lists can be added to the keyword search ingest process; searches will happen at regular intervals as content is added to the index.
|
||||
## Lists tab {#keywordListsTab}
|
||||
|
||||
Regular expressions are supported using Lucene Regex Syntax which is documented here: https://lucene.apache.org/core/6_4_0/core/org/apache/lucene/util/automaton/RegExp.html. .* is automatically added to the beginning and end of the regular expressions to ensure all matches are found. Additionally, the resulting hits are split on common token separator boundaries (e.g. space, newline, colon, exclamation point etc.) to reduce false positives and to make the resulting keyword hit more amenable to highlighting. If you would prefer not to split on these common boundary characters, you can put .* at the start and/or end of the regex.
|
||||
The Lists tab is used to create/import and add content to keyword lists. To create a list, select the 'New List' button and choose a name for the new Keyword List. Once the list has been created, keywords can be added to it (see \ref ad_hoc_kw_types_section for more information on keyword types). Lists can be added to the keyword search ingest process; searches will happen at regular intervals as content is added to the index.
|
||||
|
||||
<b>List Import and Export</b> \n
|
||||
Autopsy supports importing Encase tab-delimited lists as well as lists created previously with Autopsy. For Encase lists, folder structure and hierarchy is ignored. There is currently no way to export lists for use with Encase, but lists can be exported to share between Autopsy users.
|
||||
|
||||
<b>Lists tab</b> \n
|
||||
\image html keyword-search-configuration-dialog.PNG
|
||||
|
||||
The Lists tab is used to create/import and add content to keyword lists. Once a keyword list is selected, the "New Keywords" button can be used to add one or more entries to the list.
|
||||
The lists of keywords can be found on the left side of the panel. New lists can be created, existing lists can be renamed, copied, exported, or deleted, and lists can be imported. Autopsy supports importing Encase tab-delimited lists as well as lists created previously with Autopsy. For Encase lists, folder structure and hierarchy is ignored. There is currently no way to export lists for use with Encase, but lists can be exported to share between Autopsy users.
|
||||
|
||||
Once a keyword list is selected all keywords in that list will be displayed on the right side of the tab. The "New Keywords" button can be used to add one or more entries to the list, and the "Edit keyword" and "Delete keywords" buttons can alter the existing entries.
|
||||
|
||||
<br>
|
||||
\image html keyword-search-configuration-new-keywords.PNG
|
||||
|
||||
New entries can be typed into the dialog or pasted from the clipboard. All entries added at once must be the same type of match (exact, substring, or regex), but the dialog can be used multiple times to add keywords to the keyword list.
|
||||
New entries can be typed into the dialog or pasted from the clipboard. All entries added at once must be the same type of match (exact, substring, or regex), but the dialog can be used multiple times to add keywords to the keyword list. Refer to the \ref ad_hoc_kw_types_section section for an explanation of each keyword type.
|
||||
|
||||
<br>
|
||||
<b>String extraction setting</b> \n
|
||||
Under the Keyword list is the option to send ingest inbox messages for each hit. If this is enabled, each keyword hit for that list will be accessible through the yellow triangle next to the Keyword Lists button. This feature gives you a quick way to view your most important keyword search results.
|
||||
|
||||
\image html keyword-search-inbox.PNG
|
||||
|
||||
## String Extraction tab {#stringExtractionTab}
|
||||
The string extraction setting defines how strings are extracted from files from which text cannot be extracted because their file formats are not supported. This is the case with arbitrary binary files (such as the page file) and chunks of unallocated space that represent deleted files.
|
||||
When we extract strings from binary files we need to interpet sequences of bytes as text differently, depending on the possible text encoding and script/language used. In many cases we don't know in advance what the specific encoding/language the text is encoded in. However, it helps if the investigator is looking for a specific language, because by selecting less languages the indexing performance will be improved and the number of false positives will be reduced.
|
||||
When we extract strings from binary files we need to interpret sequences of bytes as text differently, depending on the possible text encoding and script/language used. In many cases we don't know in advance what the specific encoding/language the text is encoded in. However, it helps if the investigator is looking for a specific language, because by selecting less languages the indexing performance will be improved and the number of false positives will be reduced.
|
||||
|
||||
The default setting is to search for English strings only, encoded as either UTF8 or UTF16. This setting has the best performance (shortest ingest time).
|
||||
The user can also use the String Viewer first and try different script/language settings, and see which settings give satisfactory results for the type of text relevant to the investigation. Then the same setting that works for the investigation can be applied to the keyword search ingest.
|
||||
<br>
|
||||
<b> String Extraction tab</b>
|
||||
|
||||
\image html keyword-search-configuration-dialog-string-extraction.PNG
|
||||
|
||||
<br>
|
||||
<br>
|
||||
<b>General Settings</b> \n
|
||||
<br>
|
||||
<b>NIST NSRL Support</b> \n
|
||||
## General Settings tab {#generalSettingsTab}
|
||||
|
||||
\image html keyword-search-configuration-dialog-general.PNG
|
||||
|
||||
### NIST NSRL Support
|
||||
The hash database ingest service can be configured to use the NIST NSRL hash database of known files. The keyword search advanced configuration dialog "General" tab contains an option to skip keyword indexing and search for files that have previously been marked as "known" and uninteresting. Selecting this option can greatly reduce the size of the index and improve ingest performance. In most cases, the user does not need to keyword search "known" files.
|
||||
|
||||
<b>Result update frequency during ingest</b> \n
|
||||
### Result update frequency during ingest
|
||||
To control how frequently searches are executed during ingest, the user can adjust the timing setting available in the keyword search advanced configuration dialog "General" tab. Setting the number of minutes lower will result in more frequent index updates and searches being executed and the user will be able to see results more in real-time. However, more frequent updates can affect the overall performance, especially on lower-end systems, and can potentially lengthen the overall time needed for the ingest to complete.
|
||||
|
||||
One can also choose to have no periodic searches. This will speed up the ingest. Users choosing this option can run their keyword searches once the entire keyword search index is complete.
|
||||
|
||||
<b>General tab</b>
|
||||
\image html keyword-search-configuration-dialog-general.PNG
|
||||
|
||||
|
||||
|
||||
<!----------------------------------------->
|
||||
|
||||
@@ -73,10 +64,9 @@ Using the Module
|
||||
======
|
||||
Search queries can be executed manually by the user at any time, as long as there are some files already indexed and ready to be searched. Searching before indexing is complete will naturally only search indexes that are already compiled.
|
||||
|
||||
|
||||
See \ref ingest_page "Ingest" for more information on ingest in general.
|
||||
|
||||
Once there are files in the index, the \subpage keyword_search_bar "Keyword Search Bar" will be available for use to manually search at any time.
|
||||
Once there are files in the index, \ref ad_hoc_keyword_search_page will be available for use to manually search at any time.
|
||||
|
||||
<!----------------------------------->
|
||||
|
||||
@@ -86,46 +76,15 @@ The Ingest Settings for the Keyword Search module allow the user to enable or di
|
||||
|
||||
\image html keyword-search-ingest-settings.PNG
|
||||
|
||||
<br>
|
||||
\section keyword_search_bar Keyword Search Bar
|
||||
|
||||
The keyword search bar is used to search for keywords in the manual mode (outside of ingest). The existing index will be searched for matching words, phrases, lists, or regular expressions.
|
||||
|
||||
<b>Individual Keyword Search</b> \n
|
||||
Individual keyword or regular expressions can quickly be searched using the search text box widget. You can select "Exact Match", "Substring Match" and "Regular Expression" match.
|
||||
|
||||
\image html keyword-search-bar.PNG
|
||||
<br>
|
||||
Results will be opened in a separate Results Viewer for every search executed and they will also be saved in the Directory Tree as shown in the screenshot below.
|
||||
<br>
|
||||
\image html keyword-search-hits.PNG
|
||||
<br>
|
||||
|
||||
<b>Keyword List Search</b> \n
|
||||
Lists created using the Keyword Search Configuration Dialog can be manually searched by the user by pressing on the 'Keyword Lists' button, selecting the check boxes corresponding to the lists to be searched, and pressing the 'Search' button.
|
||||
|
||||
\image html keyword-search-list.PNG
|
||||
<br>
|
||||
The results of the keyword list search are shown in the tree, as shown below.
|
||||
<br>
|
||||
\image html keyword-search-list-results.PNG
|
||||
<br>
|
||||
|
||||
<b>Searching during ingest</b> \n
|
||||
Manual search for individual keywords or regular expressions can be executed while ingest is ongoing, using the current index. Note however, that you may miss some results if the entire index has not yet been populated. Autopsy enables you to perform the search on an incomplete index in order to retrieve some preliminary results in real-time.
|
||||
|
||||
During the ingest, the manual search by keyword list is deactivated. A newly selected list can instead be added, and it will be searched in the background instead.
|
||||
|
||||
Keywords and lists can be managed during ingest.
|
||||
|
||||
|
||||
Seeing Results
|
||||
------
|
||||
|
||||
The Keyword Search module will save the search results regardless of whether the search is performed by the ingest process or manually by the user. The saved results are available in the Directory Tree in the left hand side panel.
|
||||
|
||||
To see keyword search results in real-time while ingest is running, add keyword lists using the \subpage keyword_search_configuration_dialog "Keyword Search Configuration Dialog" and select the "Use during ingest" check box. You can select "Send messages to inbox during ingest" per list, if the hits on that list should be reported in the Inbox, which is recommended for very specific searches.
|
||||
The keyword results will appear in the tree under "Keyword Hits". Each keyword search term will display the number of matches, and can be expanded to show the matches. From here, clicking on one of the matches will show a list of files on the right side of the screen. Select a file and go to the Indexed Text tab to see exactly where the matches occurred in the file.
|
||||
|
||||
\image html keyword_results.PNG
|
||||
|
||||
|
||||
*/
|
||||
|
@@ -39,6 +39,7 @@ The following topics are available here:
|
||||
- \subpage content_viewer_page
|
||||
- \subpage image_gallery_page
|
||||
- \subpage file_search_page
|
||||
- \subpage ad_hoc_keyword_search_page
|
||||
- \subpage timeline_page
|
||||
- \subpage stix_page
|
||||
- \subpage logs_and_output_page
|
||||
|
@@ -301,6 +301,10 @@ class TskDbDiff(object):
|
||||
|
||||
conn = sqlite3.connect(backup_db_file)
|
||||
id_path_table = build_id_table(conn.cursor())
|
||||
id_vs_parts_table = build_id_vs_parts_table(conn.cursor())
|
||||
id_vs_info_table = build_id_vs_info_table(conn.cursor())
|
||||
id_fs_info_table = build_id_fs_info_table(conn.cursor())
|
||||
id_objects_table = build_id_objects_table(conn.cursor())
|
||||
conn.text_factory = lambda x: x.decode("utf-8", "ignore")
|
||||
|
||||
# Delete the blackboard tables
|
||||
@@ -310,7 +314,7 @@ class TskDbDiff(object):
|
||||
# Write to the database dump
|
||||
with codecs.open(dump_file, "wb", "utf_8") as db_log:
|
||||
for line in conn.iterdump():
|
||||
line = normalize_db_entry(line, id_path_table)
|
||||
line = normalize_db_entry(line, id_path_table, id_vs_parts_table, id_vs_info_table, id_fs_info_table, id_objects_table)
|
||||
db_log.write('%s\n' % line)
|
||||
# Now sort the file
|
||||
|
||||
@@ -342,7 +346,7 @@ class TskDbDiff(object):
|
||||
class TskDbDiffException(Exception):
|
||||
pass
|
||||
|
||||
def normalize_db_entry(line, table):
|
||||
def normalize_db_entry(line, table, vs_parts_table, vs_info_table, fs_info_table, objects_table):
|
||||
""" Make testing more consistent and reasonable by doctoring certain db entries.
|
||||
|
||||
Args:
|
||||
@@ -369,28 +373,59 @@ def normalize_db_entry(line, table):
|
||||
# remove object ID
|
||||
elif (path_index != -1):
|
||||
obj_id = fields_list[0]
|
||||
path = table[int(obj_id)]
|
||||
newLine = ('INSERT INTO "tsk_files_path" VALUES(' + path + ', '.join(fields_list[1:]) + ');')
|
||||
objValue = table[int(obj_id)]
|
||||
par_obj_id = objects_table[int(obj_id)]
|
||||
par_obj_value = table[par_obj_id]
|
||||
par_obj_name = par_obj_value[par_obj_value.rfind('/')+1:]
|
||||
#check the par_id that we insert to the path name when we create uniqueName
|
||||
pathValue = re.sub(par_obj_name + '_' + str(par_obj_id), par_obj_name, fields_list[1])
|
||||
|
||||
newLine = ('INSERT INTO "tsk_files_path" VALUES(' + objValue + ', ' + pathValue + ', ' + ', '.join(fields_list[2:]) + ');')
|
||||
return newLine
|
||||
# remove object ID
|
||||
elif (layout_index != -1):
|
||||
obj_id = fields_list[0]
|
||||
path= table[int(obj_id)]
|
||||
newLine = ('INSERT INTO "tsk_file_layout" VALUES(' + path + ', '.join(fields_list[1:]) + ');')
|
||||
newLine = ('INSERT INTO "tsk_file_layout" VALUES(' + path + ', ' + ', '.join(fields_list[1:]) + ');')
|
||||
return newLine
|
||||
# remove object ID
|
||||
elif (object_index != -1):
|
||||
obj_id = fields_list[0]
|
||||
parent_id = fields_list[1]
|
||||
|
||||
newLine = 'INSERT INTO "tsk_objects" VALUES('
|
||||
path = None
|
||||
parent_path = None
|
||||
|
||||
#if obj_id or parent_id is invalid literal, we simple return the values as it is
|
||||
try:
|
||||
path = table[int(obj_id)]
|
||||
parent_path = table[int(parent_id)]
|
||||
newLine = ('INSERT INTO "tsk_objects" VALUES(' + path + ', ' + parent_path + ', ' + ', '.join(fields_list[2:]) + ');')
|
||||
return newLine
|
||||
except Exception as e:
|
||||
# objects table has things that aren't files. if lookup fails, don't replace anything.
|
||||
obj_id = int(obj_id)
|
||||
parent_id = int(parent_id)
|
||||
except Exception as e:
|
||||
return line
|
||||
|
||||
if obj_id in table.keys():
|
||||
path = table[obj_id]
|
||||
elif obj_id in vs_parts_table.keys():
|
||||
path = vs_parts_table[obj_id]
|
||||
elif obj_id in vs_info_table.keys():
|
||||
path = vs_info_table[obj_id]
|
||||
elif obj_id in fs_info_table.keys():
|
||||
path = fs_info_table[obj_id]
|
||||
|
||||
if parent_id in table.keys():
|
||||
parent_path = table[parent_id]
|
||||
elif parent_id in vs_parts_table.keys():
|
||||
parent_path = vs_parts_table[parent_id]
|
||||
elif parent_id in vs_info_table.keys():
|
||||
parent_path = vs_info_table[parent_id]
|
||||
elif parent_id in fs_info_table.keys():
|
||||
parent_path = fs_info_table[parent_id]
|
||||
|
||||
|
||||
if path and parent_path:
|
||||
return newLine + path + ', ' + parent_path + ', ' + ', '.join(fields_list[2:]) + ');'
|
||||
else:
|
||||
return line
|
||||
# remove time-based information, ie Test_6/11/14 -> Test
|
||||
elif (report_index != -1):
|
||||
fields_list[1] = "AutopsyTestCase"
|
||||
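Aside (not part of the commit): the new tsk_files_path branch above uses re.sub to strip the "_<par_obj_id>" suffix that the test framework appends to a parent's name inside module-output paths, and the reworked tsk_objects branch resolves obj_id and parent_id against several maps in priority order (files, then vs_parts, vs_info, fs_info). A hedged, standalone sketch of both ideas follows; the function names and sample data are invented for illustration.

import re

def strip_parent_suffix(path, par_obj_name, par_obj_id):
    # Undo the "<parent name>_<parent obj_id>" uniquifier embedded in a path,
    # mirroring the re.sub call in the tsk_files_path branch above.
    return re.sub(par_obj_name + '_' + str(par_obj_id), par_obj_name, path)

def resolve_obj_id(obj_id, *tables):
    # Try each obj_id map in priority order and return the first hit,
    # like the if/elif chain in the tsk_objects branch above.
    for table in tables:
        if obj_id in table:
            return table[obj_id]
    return None

# Illustrative data only:
files_table = {5: '/vol_vol2/Documents/report.txt'}
vs_parts_table = {3: '2_63'}
print(strip_parent_suffix('ModuleOutput/EFS_7/file.dat', 'EFS', 7))  # -> ModuleOutput/EFS/file.dat
print(resolve_obj_id(3, files_table, vs_parts_table))                # -> 2_63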
@@ -443,7 +478,51 @@ def build_id_table(artifact_cursor):
    mapping = dict([(row[0], str(row[1]) + str(row[2])) for row in artifact_cursor.execute("SELECT obj_id, parent_path, name FROM tsk_files")])
    return mapping

+def build_id_vs_parts_table(artifact_cursor):
+    """Build the map of object ids to vs_parts.
+
+    Args:
+        artifact_cursor: the database cursor
+    """
+    # for each row in the db, take the object id, addr, and start, then create a tuple in the dictionary
+    # with the object id as the key and (addr + start) as the value
+    mapping = dict([(row[0], str(row[1]) + '_' + str(row[2])) for row in artifact_cursor.execute("SELECT obj_id, addr, start FROM tsk_vs_parts")])
+    return mapping
+
+def build_id_vs_info_table(artifact_cursor):
+    """Build the map of object ids to vs_info.
+
+    Args:
+        artifact_cursor: the database cursor
+    """
+    # for each row in the db, take the object id, vs_type, and img_offset, then create a tuple in the dictionary
+    # with the object id as the key and (vs_type + img_offset) as the value
+    mapping = dict([(row[0], str(row[1]) + '_' + str(row[2])) for row in artifact_cursor.execute("SELECT obj_id, vs_type, img_offset FROM tsk_vs_info")])
+    return mapping
+
+def build_id_fs_info_table(artifact_cursor):
+    """Build the map of object ids to fs_info.
+
+    Args:
+        artifact_cursor: the database cursor
+    """
+    # for each row in the db, take the object id, img_offset, and fs_type, then create a tuple in the dictionary
+    # with the object id as the key and (img_offset + fs_type) as the value
+    mapping = dict([(row[0], str(row[1]) + '_' + str(row[2])) for row in artifact_cursor.execute("SELECT obj_id, img_offset, fs_type FROM tsk_fs_info")])
+    return mapping
+
+def build_id_objects_table(artifact_cursor):
+    """Build the map of object ids to par_id.
+
+    Args:
+        artifact_cursor: the database cursor
+    """
+    # for each row in the db, take the object id, par_obj_id, then create a tuple in the dictionary
+    # with the object id as the key and par_obj_id as the value
+    mapping = dict([(row[0], row[1]) for row in artifact_cursor.execute("SELECT obj_id, par_obj_id FROM tsk_objects")])
+    return mapping
+
def main():
    try:
        sys.argv.pop(0)
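Aside (not part of the commit): the build_id_* helpers above all follow the same shape, a single SELECT turned into an obj_id-keyed dict. The following self-contained example runs two of those queries against a throwaway in-memory database whose tables are trimmed to only the columns the queries read; the sample rows are invented.

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
# Minimal stand-ins for the TSK tables, keeping only the columns queried above.
cur.execute("CREATE TABLE tsk_objects (obj_id INTEGER, par_obj_id INTEGER)")
cur.execute("CREATE TABLE tsk_vs_parts (obj_id INTEGER, addr INTEGER, start INTEGER)")
cur.executemany("INSERT INTO tsk_objects VALUES (?, ?)", [(2, 1), (3, 2)])
cur.executemany("INSERT INTO tsk_vs_parts VALUES (?, ?, ?)", [(3, 2, 63)])

# Same dictionary comprehensions as build_id_objects_table / build_id_vs_parts_table.
id_objects_table = dict([(row[0], row[1]) for row in cur.execute("SELECT obj_id, par_obj_id FROM tsk_objects")])
id_vs_parts_table = dict([(row[0], str(row[1]) + '_' + str(row[2])) for row in cur.execute("SELECT obj_id, addr, start FROM tsk_vs_parts")])

print(id_objects_table)    # {2: 1, 3: 2}
print(id_vs_parts_table)   # {3: '2_63'}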
Other changed files in this merge:

thirdparty/mactime/kanarazu-MACTIME.txt (vendored; 31277 changed lines, diff suppressed because it is too large)
thirdparty/mactime/mactime.exe (vendored; binary file not shown)

New vendored binary files under thirdparty/opencv/lib/amd64/ (not shown):
  api-ms-win-core-console-l1-1-0.dll
  api-ms-win-core-datetime-l1-1-0.dll
  api-ms-win-core-debug-l1-1-0.dll
  api-ms-win-core-errorhandling-l1-1-0.dll
  api-ms-win-core-file-l1-1-0.dll
  api-ms-win-core-file-l1-2-0.dll
  api-ms-win-core-file-l2-1-0.dll
  api-ms-win-core-handle-l1-1-0.dll
  api-ms-win-core-heap-l1-1-0.dll
  api-ms-win-core-interlocked-l1-1-0.dll
  api-ms-win-core-libraryloader-l1-1-0.dll
  api-ms-win-core-localization-l1-2-0.dll
  api-ms-win-core-memory-l1-1-0.dll
  api-ms-win-core-namedpipe-l1-1-0.dll
  api-ms-win-core-processenvironment-l1-1-0.dll
  api-ms-win-core-processthreads-l1-1-0.dll
  api-ms-win-core-processthreads-l1-1-1.dll
  api-ms-win-core-profile-l1-1-0.dll
  api-ms-win-core-rtlsupport-l1-1-0.dll
  api-ms-win-core-string-l1-1-0.dll
  api-ms-win-core-synch-l1-1-0.dll
  api-ms-win-core-synch-l1-2-0.dll
  api-ms-win-core-sysinfo-l1-1-0.dll
  api-ms-win-core-timezone-l1-1-0.dll
  api-ms-win-core-util-l1-1-0.dll
  api-ms-win-crt-conio-l1-1-0.dll
  api-ms-win-crt-convert-l1-1-0.dll
  api-ms-win-crt-environment-l1-1-0.dll
  api-ms-win-crt-filesystem-l1-1-0.dll
  api-ms-win-crt-heap-l1-1-0.dll
  api-ms-win-crt-locale-l1-1-0.dll
  api-ms-win-crt-math-l1-1-0.dll
  api-ms-win-crt-multibyte-l1-1-0.dll
  api-ms-win-crt-private-l1-1-0.dll
  api-ms-win-crt-process-l1-1-0.dll
  api-ms-win-crt-runtime-l1-1-0.dll
  api-ms-win-crt-stdio-l1-1-0.dll
  api-ms-win-crt-string-l1-1-0.dll
  api-ms-win-crt-time-l1-1-0.dll
  api-ms-win-crt-utility-l1-1-0.dll
  concrt140.dll
  libeay32.dll

Some files were not shown because too many files have changed in this diff.