debug - speed up processing and display of file contents and services
Use bash-internal globs and parameter expansion in preference to assignment from output of ls or basename per file/directory. When displaying file contents, call sed once and preprocess the entire file (e.g. pihole.toml), rather than spawning a new sed process for every line of the file. When checking services, call awk once to extract all data for each ip:port pair, rather than three times. Signed-off-by: Rob Gill <rrobgill@protonmail.com>
This commit is contained in:
@@ -593,18 +593,21 @@ check_required_ports() {
|
|||||||
# Add port 53
|
# Add port 53
|
||||||
ports_configured+=("53")
|
ports_configured+=("53")
|
||||||
|
|
||||||
|
local protocol_type port_number service_name
|
||||||
# Now that we have the values stored,
|
# Now that we have the values stored,
|
||||||
for i in "${!ports_in_use[@]}"; do
|
for i in "${!ports_in_use[@]}"; do
|
||||||
# loop through them and assign some local variables
|
# loop through them and assign some local variables
|
||||||
local service_name
|
read -r protocol_type port_number service_name <<< "$(
|
||||||
service_name=$(echo "${ports_in_use[$i]}" | awk '{gsub(/users:\(\("/,"",$7);gsub(/".*/,"",$7);print $7}')
|
awk '{
|
||||||
local protocol_type
|
p=$1; n=$5; s=$7
|
||||||
protocol_type=$(echo "${ports_in_use[$i]}" | awk '{print $1}')
|
gsub(/users:\(\("/,"",s)
|
||||||
local port_number
|
gsub(/".*/,"",s)
|
||||||
port_number="$(echo "${ports_in_use[$i]}" | awk '{print $5}')" # | awk '{gsub(/^.*:/,"",$5);print $5}')
|
print p, n, s
|
||||||
|
}' <<< "${ports_in_use[$i]}"
|
||||||
|
)"
|
||||||
|
|
||||||
# Check if the right services are using the right ports
|
# Check if the right services are using the right ports
|
||||||
if [[ ${ports_configured[*]} =~ $(echo "${port_number}" | rev | cut -d: -f1 | rev) ]]; then
|
if [[ ${ports_configured[*]} =~ ${port_number##*:} ]]; then
|
||||||
compare_port_to_service_assigned "${ftl}" "${service_name}" "${protocol_type}:${port_number}"
|
compare_port_to_service_assigned "${ftl}" "${service_name}" "${protocol_type}:${port_number}"
|
||||||
else
|
else
|
||||||
# If it's not a default port that Pi-hole needs, just print it out for the user to see
|
# If it's not a default port that Pi-hole needs, just print it out for the user to see
|
||||||
@@ -816,42 +819,27 @@ ftl_full_status(){
|
|||||||
|
|
||||||
make_array_from_file() {
|
make_array_from_file() {
|
||||||
local filename="${1}"
|
local filename="${1}"
|
||||||
|
|
||||||
|
# If the file is a directory do nothing since it cannot be parsed
|
||||||
|
[[ -d "${filename}" ]] && return
|
||||||
|
|
||||||
# The second argument can put a limit on how many line should be read from the file
|
# The second argument can put a limit on how many line should be read from the file
|
||||||
# Since some of the files are so large, this is helpful to limit the output
|
# Since some of the files are so large, this is helpful to limit the output
|
||||||
local limit=${2}
|
local limit=${2}
|
||||||
# A local iterator for testing if we are at the limit above
|
# A local iterator for testing if we are at the limit above
|
||||||
local i=0
|
local i=0
|
||||||
# If the file is a directory
|
|
||||||
if [[ -d "${filename}" ]]; then
|
|
||||||
# do nothing since it cannot be parsed
|
|
||||||
:
|
|
||||||
else
|
|
||||||
# Otherwise, read the file line by line
|
|
||||||
while IFS= read -r line;do
|
|
||||||
# Otherwise, strip out comments and blank lines
|
|
||||||
new_line=$(echo "${line}" | sed -e 's/^\s*#.*$//' -e '/^$/d')
|
|
||||||
# If the line still has content (a non-zero value)
|
|
||||||
if [[ -n "${new_line}" ]]; then
|
|
||||||
|
|
||||||
# If the string contains "### CHANGED", highlight this part in red
|
# Process the file, strip out comments and blank lines
|
||||||
if [[ "${new_line}" == *"### CHANGED"* ]]; then
|
local processed
|
||||||
new_line="${new_line//### CHANGED/${COL_RED}### CHANGED${COL_NC}}"
|
processed=$(sed -e 's/^\s*#.*$//' -e '/^$/d' "${filename}")
|
||||||
fi
|
|
||||||
|
|
||||||
# Finally, write this line to the log
|
while IFS= read -r line; do
|
||||||
log_write " ${new_line}"
|
# If the string contains "### CHANGED", highlight this part in red
|
||||||
fi
|
log_write " ${line//### CHANGED/${COL_RED}### CHANGED${COL_NC}}"
|
||||||
# Increment the iterator +1
|
((i++))
|
||||||
i=$((i+1))
|
# if the limit of lines we want to see is exceeded do nothing
|
||||||
# but if the limit of lines we want to see is exceeded
|
[[ -n ${limit} && $i -eq ${limit} ]] && break
|
||||||
if [[ -z ${limit} ]]; then
|
done <<< "$processed"
|
||||||
# do nothing
|
|
||||||
:
|
|
||||||
elif [[ $i -eq ${limit} ]]; then
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done < "${filename}"
|
|
||||||
fi
|
|
||||||
}
|
}
|
||||||
|
|
||||||
parse_file() {
|
parse_file() {
|
||||||
@@ -924,38 +912,38 @@ list_files_in_dir() {
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# Store the files found in an array
|
# Store the files found in an array
|
||||||
mapfile -t files_found < <(ls "${dir_to_parse}")
|
local files_found=("${dir_to_parse}"/*)
|
||||||
# For each file in the array,
|
# For each file in the array,
|
||||||
for each_file in "${files_found[@]}"; do
|
for each_file in "${files_found[@]}"; do
|
||||||
if [[ -d "${dir_to_parse}/${each_file}" ]]; then
|
if [[ -d "${each_file}" ]]; then
|
||||||
# If it's a directory, do nothing
|
# If it's a directory, do nothing
|
||||||
:
|
:
|
||||||
elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \
|
elif [[ "${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \
|
||||||
[[ "${dir_to_parse}/${each_file}" == "${PIHOLE_RAW_BLOCKLIST_FILES}" ]] || \
|
[[ "${each_file}" == "${PIHOLE_RAW_BLOCKLIST_FILES}" ]] || \
|
||||||
[[ "${dir_to_parse}/${each_file}" == "${PIHOLE_INSTALL_LOG_FILE}" ]] || \
|
[[ "${each_file}" == "${PIHOLE_INSTALL_LOG_FILE}" ]] || \
|
||||||
[[ "${dir_to_parse}/${each_file}" == "${PIHOLE_LOG}" ]] || \
|
[[ "${each_file}" == "${PIHOLE_LOG}" ]] || \
|
||||||
[[ "${dir_to_parse}/${each_file}" == "${PIHOLE_LOG_GZIPS}" ]]; then
|
[[ "${each_file}" == "${PIHOLE_LOG_GZIPS}" ]]; then
|
||||||
:
|
:
|
||||||
elif [[ "${dir_to_parse}" == "${DNSMASQ_D_DIRECTORY}" ]]; then
|
elif [[ "${dir_to_parse}" == "${DNSMASQ_D_DIRECTORY}" ]]; then
|
||||||
# in case of the dnsmasq directory include all files in the debug output
|
# in case of the dnsmasq directory include all files in the debug output
|
||||||
log_write "\\n${COL_GREEN}$(ls -lhd "${dir_to_parse}"/"${each_file}")${COL_NC}"
|
log_write "\\n${COL_GREEN}$(ls -lhd "${each_file}")${COL_NC}"
|
||||||
make_array_from_file "${dir_to_parse}/${each_file}"
|
make_array_from_file "${each_file}"
|
||||||
else
|
else
|
||||||
# Then, parse the file's content into an array so each line can be analyzed if need be
|
# Then, parse the file's content into an array so each line can be analyzed if need be
|
||||||
for i in "${!REQUIRED_FILES[@]}"; do
|
for i in "${!REQUIRED_FILES[@]}"; do
|
||||||
if [[ "${dir_to_parse}/${each_file}" == "${REQUIRED_FILES[$i]}" ]]; then
|
if [[ "${each_file}" == "${REQUIRED_FILES[$i]}" ]]; then
|
||||||
# display the filename
|
# display the filename
|
||||||
log_write "\\n${COL_GREEN}$(ls -lhd "${dir_to_parse}"/"${each_file}")${COL_NC}"
|
log_write "\\n${COL_GREEN}$(ls -lhd "${each_file}")${COL_NC}"
|
||||||
# Check if the file we want to view has a limit (because sometimes we just need a little bit of info from the file, not the entire thing)
|
# Check if the file we want to view has a limit (because sometimes we just need a little bit of info from the file, not the entire thing)
|
||||||
case "${dir_to_parse}/${each_file}" in
|
case "${each_file}" in
|
||||||
# If it's Web server log, give the first and last 25 lines
|
# If it's Web server log, give the first and last 25 lines
|
||||||
"${PIHOLE_WEBSERVER_LOG}") head_tail_log "${dir_to_parse}/${each_file}" 25
|
"${PIHOLE_WEBSERVER_LOG}") head_tail_log "${each_file}" 25
|
||||||
;;
|
;;
|
||||||
# Same for the FTL log
|
# Same for the FTL log
|
||||||
"${PIHOLE_FTL_LOG}") head_tail_log "${dir_to_parse}/${each_file}" 35
|
"${PIHOLE_FTL_LOG}") head_tail_log "${each_file}" 35
|
||||||
;;
|
;;
|
||||||
# parse the file into an array in case we ever need to analyze it line-by-line
|
# parse the file into an array in case we ever need to analyze it line-by-line
|
||||||
*) make_array_from_file "${dir_to_parse}/${each_file}";
|
*) make_array_from_file "${each_file}";
|
||||||
esac
|
esac
|
||||||
else
|
else
|
||||||
# Otherwise, do nothing since it's not a file needed for Pi-hole so we don't care about it
|
# Otherwise, do nothing since it's not a file needed for Pi-hole so we don't care about it
|
||||||
@@ -991,6 +979,7 @@ head_tail_log() {
|
|||||||
local filename="${1}"
|
local filename="${1}"
|
||||||
# The number of lines to use for head and tail
|
# The number of lines to use for head and tail
|
||||||
local qty="${2}"
|
local qty="${2}"
|
||||||
|
local filebasename="${filename##*/}"
|
||||||
local head_line
|
local head_line
|
||||||
local tail_line
|
local tail_line
|
||||||
# Put the current Internal Field Separator into another variable so it can be restored later
|
# Put the current Internal Field Separator into another variable so it can be restored later
|
||||||
@@ -999,14 +988,14 @@ head_tail_log() {
|
|||||||
IFS=$'\r\n'
|
IFS=$'\r\n'
|
||||||
local log_head=()
|
local log_head=()
|
||||||
mapfile -t log_head < <(head -n "${qty}" "${filename}")
|
mapfile -t log_head < <(head -n "${qty}" "${filename}")
|
||||||
log_write "  ${COL_CYAN}-----head of $(basename "${filename}")------${COL_NC}"
|
log_write " ${COL_CYAN}-----head of ${filebasename}------${COL_NC}"
|
||||||
for head_line in "${log_head[@]}"; do
|
for head_line in "${log_head[@]}"; do
|
||||||
log_write " ${head_line}"
|
log_write " ${head_line}"
|
||||||
done
|
done
|
||||||
log_write ""
|
log_write ""
|
||||||
local log_tail=()
|
local log_tail=()
|
||||||
mapfile -t log_tail < <(tail -n "${qty}" "${filename}")
|
mapfile -t log_tail < <(tail -n "${qty}" "${filename}")
|
||||||
log_write "  ${COL_CYAN}-----tail of $(basename "${filename}")------${COL_NC}"
|
log_write " ${COL_CYAN}-----tail of ${filebasename}------${COL_NC}"
|
||||||
for tail_line in "${log_tail[@]}"; do
|
for tail_line in "${log_tail[@]}"; do
|
||||||
log_write " ${tail_line}"
|
log_write " ${tail_line}"
|
||||||
done
|
done
|
||||||
|
|||||||
Reference in New Issue
Block a user