mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-12 16:06:15 +00:00)

commit 8c9c9cb9bb
Merge branch 'master' of https://github.com/Smoss/autopsy
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2013 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -46,6 +46,7 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.CoreComponentControl;
 import org.sleuthkit.autopsy.coreutils.FileUtil;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
+import org.sleuthkit.autopsy.coreutils.PlatformUtil;
 import org.sleuthkit.autopsy.coreutils.Version;
 import org.sleuthkit.datamodel.*;
 import org.sleuthkit.datamodel.SleuthkitJNI.CaseDbHandle.AddImageProcess;
@@ -877,6 +878,7 @@ public class Case {
 
     //case change helper
     private static void doCaseChange(Case toChangeTo) {
+        logger.log(Level.INFO, "Changing Case to: " + toChangeTo);
         if (toChangeTo != null) { // new case is open
 
             // clear the temp folder when the case is created / opened
@@ -916,6 +918,9 @@ public class Case {
                 f.setTitle(Case.getAppName()); // set the window name to just application name
             }
 
+            //log memory usage after case changed
+            logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
+
 
         }
 
@@ -360,9 +360,10 @@ public class DataContentViewerMedia extends javax.swing.JPanel implements DataCo
             return false;
         }
 
-        if (file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
-            return false;
-        }
+        //try displaying deleted files if we can read them
+        //if (file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
+        //    return false;
+        //}
 
         if (file.getSize() == 0) {
             return false;
@@ -150,7 +150,7 @@ public abstract class KeywordSearchListsAbstract {
 
     }
 
-    List<KeywordSearchList> getListsL() {
+    public List<KeywordSearchList> getListsL() {
         List<KeywordSearchList> ret = new ArrayList<KeywordSearchList>();
         for (KeywordSearchList list : theLists.values()) {
             ret.add(list);
@@ -158,7 +158,7 @@ public abstract class KeywordSearchListsAbstract {
         return ret;
     }
 
-    List<KeywordSearchList> getListsL(boolean locked) {
+    public List<KeywordSearchList> getListsL(boolean locked) {
         List<KeywordSearchList> ret = new ArrayList<KeywordSearchList>();
         for (KeywordSearchList list : theLists.values()) {
             if (list.isLocked().equals(locked)) {
@@ -173,7 +173,7 @@ public abstract class KeywordSearchListsAbstract {
      *
      * @return List of keyword list names
      */
-    List<String> getListNames() {
+    public List<String> getListNames() {
         return new ArrayList<String>(theLists.keySet());
     }
 
@@ -183,7 +183,7 @@ public abstract class KeywordSearchListsAbstract {
      * @param locked true if look for locked lists, false otherwise
      * @return List of keyword list names
      */
-    List<String> getListNames(boolean locked) {
+    public List<String> getListNames(boolean locked) {
         ArrayList<String> lists = new ArrayList<String>();
         for (String listName : theLists.keySet()) {
             KeywordSearchList list = theLists.get(listName);
@@ -201,7 +201,7 @@ public abstract class KeywordSearchListsAbstract {
      * @param keyword
      * @return found list or null
      */
-    KeywordSearchList getListWithKeyword(Keyword keyword) {
+    public KeywordSearchList getListWithKeyword(Keyword keyword) {
         KeywordSearchList found = null;
         for (KeywordSearchList list : theLists.values()) {
             if (list.hasKeyword(keyword)) {
@@ -218,7 +218,7 @@ public abstract class KeywordSearchListsAbstract {
      * @param keyword
      * @return found list or null
      */
-    KeywordSearchList getListWithKeyword(String keyword) {
+    public KeywordSearchList getListWithKeyword(String keyword) {
         KeywordSearchList found = null;
         for (KeywordSearchList list : theLists.values()) {
             if (list.hasKeyword(keyword)) {
@@ -244,7 +244,7 @@ public abstract class KeywordSearchListsAbstract {
      * @param locked true if look for locked lists, false otherwise
      * @return number of unlocked lists currently stored
      */
-    int getNumberLists(boolean locked) {
+    public int getNumberLists(boolean locked) {
         int numLists = 0;
         for (String listName : theLists.keySet()) {
             KeywordSearchList list = theLists.get(listName);
@@ -261,7 +261,7 @@ public abstract class KeywordSearchListsAbstract {
      * @param name id of the list
      * @return keyword list representation
     */
-    KeywordSearchList getList(String name) {
+    public KeywordSearchList getList(String name) {
         return theLists.get(name);
     }
 
@@ -477,19 +477,19 @@ public abstract class KeywordSearchListsAbstract {
         return hash;
     }
 
-    String getName() {
+    public String getName() {
         return name;
     }
 
-    Date getDateCreated() {
+    public Date getDateCreated() {
         return created;
     }
 
-    Date getDateModified() {
+    public Date getDateModified() {
         return modified;
     }
 
-    Boolean getUseForIngest() {
+    public Boolean getUseForIngest() {
         return useForIngest;
     }
 
@@ -497,7 +497,7 @@ public abstract class KeywordSearchListsAbstract {
         this.useForIngest = use;
     }
 
-    Boolean getIngestMessages() {
+    public Boolean getIngestMessages() {
         return ingestMessages;
     }
 
@@ -505,7 +505,7 @@ public abstract class KeywordSearchListsAbstract {
         this.ingestMessages = ingestMessages;
    }
 
-    List<Keyword> getKeywords() {
+    public List<Keyword> getKeywords() {
         return keywords;
     }
 
@@ -513,7 +513,7 @@ public abstract class KeywordSearchListsAbstract {
         return keywords.contains(keyword);
     }
 
-    boolean hasKeyword(String keyword) {
+    public boolean hasKeyword(String keyword) {
         //note, this ignores isLiteral
         for (Keyword k : keywords) {
             if (k.getQuery().equals(keyword)) {
@@ -523,7 +523,7 @@ public abstract class KeywordSearchListsAbstract {
         return false;
     }
 
-    Boolean isLocked() {
+    public Boolean isLocked() {
         return locked;
     }
 }
NEWS.txt

@@ -9,10 +9,12 @@ Improvements:
 - show children counts in directory tree
 
 Bugfixes:
+- fixed memory leaks in "Add Image"
 - show error message in hex and string viewer if specific offset of a file could not be read.
 - file search actions not always enabled when new case is open.
 - fixed directory tree history being reset when tree is refreshed.
 - exif module better jpeg detection using signature and not only file extension.
+- The "media view" tab is inactive for deleted files (#165)
 
 ---------------- VERSION 3.0.4 --------------
 
@@ -2,10 +2,10 @@
  *
  * Autopsy Forensic Browser
  *
- * Copyright 2012 Basis Technology Corp.
+ * Copyright 2012-2013 Basis Technology Corp.
  *
  * Copyright 2012 42six Solutions.
- * Contact: aebadirad <at> 42six <dot> com
+ *
  * Project Contact/Architect: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -117,12 +117,9 @@ public class Chrome extends Extract implements IngestModuleImage {
             }
         }
 
-        // we should have only one allocated history file. Log a warning if we
-        // have more, but process them all
-        if (allocatedHistoryFiles.size() > 1) {
-            logger.log(Level.INFO, "Found more than one allocated Chrome history file. Processing them all.");
-        } else if (allocatedHistoryFiles.size() == 0) {
-            logger.log(Level.INFO, "Could not find an allocated Chrome history file.");
+        // log a message if we don't have any allocated history files
+        if (allocatedHistoryFiles.size() == 0) {
+            logger.log(Level.INFO, "Could not find any allocated Chrome history files.");
             return;
         }
 
@@ -1,5 +1,5 @@
 #!/usr/bin/python
-#en_US.latin-1
+# -*- coding: utf_8 -*-
 import codecs
 import datetime
 import logging
@@ -372,15 +372,15 @@ def run_config_test(config_file):
     parsed = parse(config_file)
     counts = {}
     if parsed.getElementsByTagName("indir"):
-        case.input_dir = parsed.getElementsByTagName("indir")[0].getAttribute("value").encode().decode("utf-8")
+        case.input_dir = parsed.getElementsByTagName("indir")[0].getAttribute("value").encode().decode("utf_8")
     if parsed.getElementsByTagName("global_csv"):
-        case.global_csv = parsed.getElementsByTagName("global_csv")[0].getAttribute("value").encode().decode("utf-8")
+        case.global_csv = parsed.getElementsByTagName("global_csv")[0].getAttribute("value").encode().decode("utf_8")
 
     # Generate the top navbar of the HTML for easy access to all images
     case.global_csv = make_local_path(case.global_csv)
     values = []
     for element in parsed.getElementsByTagName("image"):
-        value = element.getAttribute("value").encode().decode("utf-8")
+        value = element.getAttribute("value").encode().decode("utf_8")
         if file_exists(value):
             values.append(value)
     html_add_images(values)
@@ -390,7 +390,7 @@ def run_config_test(config_file):
     if(args.contin):
         #set all times an image has been processed to 0
         for element in parsed.getElementsByTagName("image"):
-            value = element.getAttribute("value").encode().decode("utf-8")
+            value = element.getAttribute("value").encode().decode("utf_8")
             images.append(value)
         #Begin infiniloop
         if(newDay()):
@@ -812,14 +812,14 @@ def compare_tsk_objects():
 def generate_common_log():
     try:
         logs_path = make_local_path(case.output_dir, case.image_name, "logs")
-        common_log = codecs.open(case.common_log_path, "w", "latin-1")
+        common_log = codecs.open(case.common_log_path, "w", "utf_8")
         print(case.common_log_path)
-        warning_log = codecs.open(case.warning_log, "w", "latin_1")
+        warning_log = codecs.open(case.warning_log, "w", "utf_8")
         common_log.write("--------------------------------------------------\n")
         common_log.write(case.image_name + "\n")
         common_log.write("--------------------------------------------------\n")
         for file in os.listdir(logs_path):
-            log = codecs.open(make_path(logs_path, file), "r", "latin-1")
+            log = codecs.open(make_path(logs_path, file), "r", "utf_8")
             for line in log:
                 if line.startswith("Exception"):
                     common_log.write("From " + file +":\n" + line + "\n")
@@ -845,14 +845,14 @@ def generate_common_log():
 
 def compare_errors():
     gold_dir = make_local_path(case.gold, case.image_name, case.image_name + "SortedErrors.txt")
-    common_log = codecs.open(case.sorted_log, "r", "latin-1")
-    gold_log = codecs.open(gold_dir, "r", "latin-1")
+    common_log = codecs.open(case.sorted_log, "r", "utf_8")
+    gold_log = codecs.open(gold_dir, "r", "utf_8")
     gold_dat = gold_log.read()
     common_dat = common_log.read()
     patrn = re.compile("\d")
-    if (re.sub(patrn, 'd', gold_dat) != re.sub(patrn, 'd', common_dat)):
+    if (not((re.sub(patrn, 'd', gold_dat)) == (re.sub(patrn, 'd', common_dat)))):
         diff_dir = make_local_path(case.output_dir, case.image_name, "ErrorDiff.txt")
         diff_file = open(diff_dir, "w")
         dffcmdlst = ["diff", case.sorted_log, gold_dir]
         subprocess.call(dffcmdlst, stdout = diff_file)
         global attachl
@@ -1034,7 +1034,7 @@ def get_exceptions():
     results = []
     for file in os.listdir(logs_path):
         if "autopsy.log" in file:
-            log = codecs.open(make_path(logs_path, file), "r", "latin-1")
+            log = codecs.open(make_path(logs_path, file), "r", "utf_8")
             ex = re.compile("\SException")
             er = re.compile("\SError")
             for line in log:
@@ -1046,7 +1046,7 @@ def get_exceptions():
 # Returns a list of all the warnings listed in the common log
 def get_warnings():
     warnings = []
-    common_log = codecs.open(case.warning_log, "r", "latin-1")
+    common_log = codecs.open(case.warning_log, "r", "utf_8")
     for line in common_log:
         if "warning" in line.lower():
             warnings.append(line)
@@ -1068,7 +1068,7 @@ def regex_search_logs(regex):
     logs_path = make_local_path(case.output_dir, case.image_name, "logs")
     results = []
     for file in os.listdir(logs_path):
-        log = codecs.open(make_path(logs_path, file), "r", "latin-1")
+        log = codecs.open(make_path(logs_path, file), "r", "utf_8")
         for line in log:
             if regex.search(line):
                 results.append(line)
@@ -1082,7 +1082,7 @@ def search_logs(string):
     logs_path = make_local_path(case.output_dir, case.image_name, "logs")
     results = []
     for file in os.listdir(logs_path):
-        log = codecs.open(make_path(logs_path, file), "r", "latin-1")
+        log = codecs.open(make_path(logs_path, file), "r", "utf_8")
         for line in log:
             if string in line:
                 results.append(line)
@@ -1092,7 +1092,7 @@ def search_logs(string):
 # Searches the common log for any instances of a specific string.
 def search_common_log(string):
     results = []
-    log = codecs.open(case.common_log_path, "r", "latin-1")
+    log = codecs.open(case.common_log_path, "r", "utf_8")
     for line in log:
         if string in line:
             results.append(line)
@@ -1105,7 +1105,7 @@ def search_log(log, string):
     logs_path = make_local_path(case.output_dir, case.image_name, "logs", log)
     try:
         results = []
-        log = codecs.open(logs_path, "r", "latin-1")
+        log = codecs.open(logs_path, "r", "utf_8")
         for line in log:
             if string in line:
                 results.append(line)
@@ -1122,7 +1122,7 @@ def search_log_set(type, string):
     results = []
     for file in os.listdir(logs_path):
         if type in file:
-            log = codecs.open(make_path(logs_path, file), "r", "latin-1")
+            log = codecs.open(make_path(logs_path, file), "r", "utf_8")
             for line in log:
                 if string in line:
                     results.append(line)
@@ -1731,11 +1731,11 @@ def send_email():
     if(len(element)<=0):
         return
     element = element[0]
-    toval = element.getAttribute("value").encode().decode("utf-8")
+    toval = element.getAttribute("value").encode().decode("utf_8")
     if(toval==None):
         return
     element = parsed.getElementsByTagName("mail_server")[0]
-    serverval = element.getAttribute("value").encode().decode("utf-8")
+    serverval = element.getAttribute("value").encode().decode("utf_8")
     # Create the container (outer) email message.
     msg = MIMEMultipart()
     msg['Subject'] = 'Email Test'