#!/usr/bin/python3
"""
ngcp-log-flow create a call flow of a single Call-ID taking NGCP logs as input.
"""

from cmath import exp
import getopt
import sys
import re
import os
import datetime
import curses
import multiprocessing as mp
import shutil
import tempfile
import signal
import glob
import getpass
import collections
from functools import reduce
from types import SimpleNamespace
import paramiko
import io
from tabulate import tabulate
import pymysql.cursors

# Map of SIP response codes to abbreviated reason phrases, shortened so
# they fit inside the fixed-width columns of the rendered call flow.
SIP_RESP_CODES = {
    "100": "Trying",
    "180": "Ringing",
    "181": "Call Forwarded",
    "182": "Queued",
    "183": "Session Prog",
    "199": "Early Dialog End",
    "200": "OK",
    "202": "Accepted",
    "204": "No Notification",
    "300": "Multi Choices",
    "301": "Moved Permanently",
    "302": "Moved Temporarily",
    "305": "Use Proxy",
    "380": "Alternative Svc",
    "400": "Bad Request",
    "401": "Unauthorized",
    "402": "Payment Required",
    "403": "Forbidden",
    "404": "Not Found",
    "405": "Method Not Allwd",
    "406": "Not Acceptable",
    "407": "Proxy Auth Req",
    "408": "Request Timeout",
    "409": "Conflict",
    "410": "Gone",
    "411": "Length Required",
    "412": "Cond Req Failed",
    "413": "Req Entity Too Lg",
    "414": "Req-URI Too Long",
    "415": "Unsupp Media",
    "416": "Unsupp URI Scheme",
    "417": "Unknown Resource",
    "420": "Bad Extension",
    "421": "Extension Req",
    "422": "Sess Interval Sm",
    "423": "Int Too Brief",
    "424": "Bad Loc Info",
    "425": "Bad Alert Msg",
    "428": "Use Identity Hdr",
    "429": "Prov Referrer ID",
    "430": "Flow Failed",
    "433": "Anonymity Disallwd",
    "436": "Bad Identity Info",
    "437": "Unsupp Cert",
    "438": "Invalid ID Header",
    "439": "1st Hop Not Outbnd",
    "440": "Max-Breadth Excd",
    "469": "Bad Info Package",
    "470": "Consent Needed",
    "480": "Temp Unavailable",
    "481": "Call/Trans No Exst",
    "482": "Loop Detected",
    "483": "Too Many Hops",
    "484": "Addr Incomplete",
    "485": "Ambiguous",
    "486": "Busy Here",
    "487": "Req Terminated",
    "488": "Not Acceptbl Here",
    "489": "Bad Event",
    "491": "Request Pending",
    "493": "Undecipherable",
    "494": "Sec Agrmt Req",
    "500": "Internal Svr Err",
    "501": "Not Implemented",
    "502": "Bad Gateway",
    "503": "Svc Unavail",
    "504": "Server Timeout",
    "505": "Ver Not Supported",
    "513": "Msg Too Large",
    "555": "Push Not Not Supp",
    "580": "Precondition Fail",
    "600": "Busy Everywhere",
    "603": "Decline",
    "604": "Not Exst Anywhere",
    "606": "Not Acceptable",
    "607": "Unwanted",
    "608": "Rejected",
}

# Combined service names (one NGCP node may host several roles at once)
mix_services = [
    "lbproxy",
    "lbws",
    "proxyws",
    "lbproxyws",
    "lbrtp",
    "proxyrtp",
    "lbproxyrtp",
]
# Single-role service names
sole_services = ["proxy", "lb", "ws", "rtp"]
# All recognized service identifiers
list_services = sole_services + mix_services

# Names of the port-range limits read from configuration
ports_limits = [
    "b2b_port_min",
    "b2b_port_max",
    "asterisk_port_min",
    "asterisk_port_max",
    "rtp_port_min",
    "rtp_port_max",
]
# Toggle for condensed output — presumably flipped by a CLI option
# elsewhere in the file; confirm against option parsing.
COMPACT = False

# Screen lines reserved below the flow: info panel plus extra padding
L_INFOS_WINDOW = 6
L_EXTRA_LINES = 5

# Unicode box-drawing glyphs used to render the flow
VERTICAL_LINE = "│"
HORIZONTAL_LINE = "─"
RIGHT_ARROW = "›"
LEFT_ARROW = "‹"

# ASCII fallbacks — swap these in if the terminal cannot render the
# Unicode glyphs above (FlowWindow.show() also keys underline styling
# off HORIZONTAL_LINE being non-Unicode).
# VERTICAL_LINE = '|'
# HORIZONTAL_LINE = '-'
# RIGHT_ARROW = '>'
# LEFT_ARROW = '<'

# Flow draw static values (fixed column widths and pre-built segments)
SPAN_DATE = 25
EMPTY_DATE = "".center(SPAN_DATE, " ")
SPAN_CODE = 0
EMPTY_CODE = "".center(SPAN_CODE, " ")
SPAN_FLOW = 27  # It has to be odd
EMPTY_FLOW = "".center(SPAN_FLOW, " ")
FULL_FLOW = "".center(SPAN_FLOW, HORIZONTAL_LINE)
TAG_FLOW = VERTICAL_LINE.center(SPAN_FLOW, " ")
# "      ›      │      ‹      " — arrows pointing at the center line
LOCAL_FLOW = (
    RIGHT_ARROW.rjust(SPAN_FLOW // 2, " ")
    + VERTICAL_LINE
    + LEFT_ARROW.ljust(SPAN_FLOW // 2, " ")
)
# "             │‹────────────" — arrow head on the right half
ARROW_RIGHT = (
    "".center(SPAN_FLOW // 2, " ")
    + VERTICAL_LINE
    + LEFT_ARROW.ljust(SPAN_FLOW // 2, HORIZONTAL_LINE)
)
# "────────────›│             " — arrow head on the left half
ARROW_LEFT = (
    RIGHT_ARROW.rjust(SPAN_FLOW // 2, HORIZONTAL_LINE)
    + VERTICAL_LINE
    + "".center(SPAN_FLOW // 2, " ")
)
# Same shapes without an arrow head (line stops at the center bar)
STOP_RIGHT = (
    "".center(SPAN_FLOW // 2, " ")
    + VERTICAL_LINE
    + "".ljust(SPAN_FLOW // 2, HORIZONTAL_LINE)
)
STOP_LEFT = (
    "".center(SPAN_FLOW // 2, HORIZONTAL_LINE)
    + VERTICAL_LINE
    + "".ljust(SPAN_FLOW // 2, " ")
)

# Regex to find information in log lines.
# Matches syslog-style ("Mon  d HH:MM:SS[.ffffff]") and ISO-style
# ("YYYY-MM-DD HH:MM:SS.ffff") timestamps.
# NOTE(review): the "." before the fractional part is unescaped, so it
# matches any character — probably harmless here but worth confirming.
RE_DATE = re.compile(
    r"^(\w{3}\s{1,2}\d{1,2}\s{1,2}\d{1,2}:\d{2}:\d{2}.\d{1,6}|"
    r"^\w{3}\s{1,2}\d{1,2}\s{1,2}\d{1,2}:\d{2}:\d{2}|"
    r"\d{4}-\d{2}-\d{2}\s{1,2}\d{1,2}:\d{2}:\d{2}.\d{4,6})"
)
# Kamailio log fields: M=<method>, S=<status>, plus IP/host markers
RE_M = re.compile(r"M=(\S*)")
RE_S = re.compile(r"S=(\S*)")
RE_PRX_IP = re.compile(r"\(«(\S*)»\)")
RE_LB_IP = re.compile(r"IP=«(\S*)»")
RE_FS = re.compile(r"fs=\'«(\S*)»\'")
RE_DU = re.compile(r"du=\'«(\S*)»\'")
RE_DESTIP = re.compile(r"DESTIP=«(\S*)»")

# Patterns for the websocket JSON-ish log lines
RE_WS_STATE_STATUS = re.compile(r'"(?:state|status)":"(\w+)"')
RE_WS_M_REPLY = re.compile(r'"name":"(\w+)"')
RE_WS_CODE = re.compile(r'"code":"(\w+)"')
RE_WS_CHANNEL = re.compile(r"channel: (?:csta\.)*([^\]]+)")
# NOTE(review): "(\s+)" captures whitespace only — every sibling pattern
# captures non-space tokens, so this looks like a typo for "(\S+)";
# confirm against actual websocket.log content before changing.
RE_WS_FROM = re.compile(r"Incoming from (\s+)")
RE_WS_QUEUE = re.compile(r"queue: (?:sp\.)*([^\]\-]+)")
RE_WS_NAME = re.compile(r'"name":"([^"]+)')
# RE_WS_DEVICE = re.compile(r'"device":"sip:([^"]+)')
RE_WS_DEVICE = re.compile(r'"device":"sip:(\w+)@')
# RE_WS_TO = re.compile(r'"to":"sip:([^"]+)')
RE_WS_TO = re.compile(r'"to":"sip:(\w+)@')

# RTPEngine :......TO BE COMPLETED
RE_CALLER_IP = re.compile(r"Port\ {1,10}(\S*)")
RE_CALLEE_IP = re.compile(r"<> «\ {1,10}(\S*)")
RE_MEDIA = re.compile(r"Media #\d \((\S*)")
RE_PROTOCOLE = re.compile(r" over (\S*)\)")
RE_PACKETS = re.compile(r"», (\S*)")
RE_ERRORS = re.compile(r"b, (\S*)")

# Regex for IPv4 and IPv6 URI handling
IPv4 = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
RE_URI_IPv4 = re.compile(r"(%s)" % IPv4)
RE_URI_PORTv4 = re.compile(r"(%s):(\d+)" % IPv4)
RE_URI_PROTv4 = re.compile(r"(\w{3}):%s" % IPv4)
IPv6 = "[a-fA-F0-9:]+"
RE_URI_IPv6 = re.compile(r"(%s)" % IPv6)
RE_URI_PORTv6 = re.compile(r"\[(%s)\]:(\d+)" % IPv6)
RE_URI_PROTv6 = re.compile(r"(\w{3}):\[%s\]" % IPv6)

# Regex to hide sensitive data due to GDPR («…» wraps user data)
GDPR_DATA = re.compile(r"«\S*»")

# Regex to find caller and callee UUID's (and other DB entity ids)
RE_SUBSCRIBER_UUID = re.compile(r"uuid '«([\w-]+)»'")
RE_PEER_HOST_ID = re.compile(r"peer host '([\w-]+)'")
RE_PEER_GROUP_ID = re.compile(r"peer group '([\w-]+)'")
RE_DP_ID = re.compile(r"dpid '([\w-]+)'")
RE_RESELLER_ID = re.compile(r"reseller id '«([\w-]+)")
RE_CONTRACT_ID = re.compile(r"contract id '«([\w-]+)")
RE_TYPE_ID = re.compile(r"type=(cf[a-z]{1,2})")

# Multi-process definition (worker count and chunking for log parsing)
PROC_NUM = 4
CHUNK_SIZE = 1024 * 1024
NB_LINES = 1048576
PS_TIMEOUT = 600

# Files definition
CONFIG_FILE = "/etc/default/ngcp-log-flow.conf"
NGCP_LOGS_DIR = "/var/log/ngcp"
NGCP_OLD_LOGS_DIR = "%s/old" % NGCP_LOGS_DIR
LB_LOG = "kamailio-lb.log"
PRX_LOG = "kamailio-proxy.log"
WS_LOG = "websocket.log"
RTP_LOG = "rtp.log"
# host/IP mapping filled elsewhere at runtime
system_ip = {}

# Which hosts hold each log file; "localhost" until discovered otherwise
file_locations = {
    LB_LOG: ["localhost"],
    PRX_LOG: ["localhost"],
    WS_LOG: ["localhost"],
    RTP_LOG: ["localhost"],
}

# Shared SSH client used by all ssh_* helpers below.
# NOTE(review): AutoAddPolicy accepts unknown host keys silently —
# acceptable inside a trusted cluster, but worth confirming.
ssh = paramiko.SSHClient()
# ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Private key for SSH auth; populated elsewhere (presumably from CLI args)
key = None

# ANSI escape codes for shell foreground colors
BColors = SimpleNamespace(
    REQCOLOR="\033[34;1;1m",
    RESPCOLOR_OK="\033[92;1;1m",
    RESPCOLOR_PROV="\033[90;1;1m",
    RESPCOLOR_ERR="\033[31;1;1m",
    ENDC_ALL="\033[0;0;0m",
    HEADER="\033[95m",
    ENDC="\033[0m",
)


class CallidsBGColors:
    """Callids properties management class.

    Keeps class-level state: the set of Call-IDs seen so far
    (``callIdSet``) and the ncurses color pairs created on demand
    (``nCursesPairs``).  Each Call-ID gets a background color based on
    its position in the sorted set of collected Call-IDs.
    """

    # Set this to True to start returning colorized codes
    background_colorize = False

    # Foreground colors used in interactive mode.
    # NOTE(review): the value comments below disagree with
    # translate_to_ncurses(), which maps 3 -> WHITE, not cyan — confirm
    # which one is authoritative.
    fgColorsAnsiCodes = [
        1,  # blue (requests)
        2,  # red (error responses)
        3,  # cyan (provisional responses)
        4,  # green (ok responses)
    ]

    # Background colors related to callids: index 0 is applied to the
    # first callid found in the log file, index 1 to the second, etc.
    bg_colors_ansi_codes = [
        "48",  # Light grey
        "46",  # Light blue
        "52",  # Sea blue
    ]

    # Fallback used when there are more callids than colors above.
    bg_colors_ansi_default_code = "39"  # Black

    # All collected Call-IDs
    callIdSet = set()

    # Extracts the callid from a log line: everything between
    # ID=«...» (e.g. ID=«43cf767f-f84a1b8c»@10.0.0.204)
    callid_regex = "ID=«([^»]*)»"

    @staticmethod
    def add_call_id_to_set(line):
        """Extract the callid from *line* (if any) and record it."""
        call_id = CallidsBGColors.get_call_id_from_log_line(line)
        if call_id is not None:
            CallidsBGColors.callIdSet.add(call_id)

    @staticmethod
    def get_call_id_from_log_line(line):
        """Return the callid found in *line*, or None when absent."""
        result = re.search(CallidsBGColors.callid_regex, line)
        return result.group(1) if result else None

    @staticmethod
    def get_bg_ansi_code(color_index):
        """Return the ANSI background code for *color_index*.

        Returns "" when colorizing is disabled or *color_index* is -1;
        falls back to the default code when the index is beyond the
        palette.
        """
        if color_index == -1 or not CallidsBGColors.background_colorize:
            return ""
        try:
            return str(CallidsBGColors.bg_colors_ansi_codes[color_index])
        except IndexError:
            return str(CallidsBGColors.bg_colors_ansi_default_code)

    @staticmethod
    def find_call_id_bg_color(call_id):
        """Return the background code assigned to *call_id*.

        The color index is the callid's position in the sorted list of
        collected callids; unknown callids get the "no color" code.
        """
        # set() kept defensively in case callIdSet was rebound to a
        # sequence with duplicates elsewhere.
        ordered_ids = sorted(set(CallidsBGColors.callIdSet))
        try:
            position = ordered_ids.index(call_id)
        except ValueError:
            position = -1
        return CallidsBGColors.get_bg_ansi_code(position)

    nCursesPairs = []

    @staticmethod
    def create_ncurses_color_pair(fg_color, bg_color):
        """Return the ncurses pair index for (*fg_color*, *bg_color*).

        Reuses an existing pair when one matches, otherwise registers a
        new one via curses.init_pair().
        """
        # Indexes start at 6 — presumably pairs 1-5 are reserved by the
        # main UI; confirm against the curses setup code.
        for offset, pair in enumerate(CallidsBGColors.nCursesPairs):
            if pair["fgColor"] == fg_color and pair["bgColor"] == bg_color:
                return 6 + offset

        index = 6 + len(CallidsBGColors.nCursesPairs)
        CallidsBGColors.nCursesPairs.append(
            {"fgColor": fg_color, "bgColor": bg_color}
        )
        curses.init_pair(
            index, CallidsBGColors.translate_to_ncurses(fg_color), bg_color
        )
        return index

    @staticmethod
    def translate_to_ncurses(color):
        """Convert an internal color code to an ncurses COLOR_* value."""
        return {
            1: curses.COLOR_BLUE,
            2: curses.COLOR_RED,
            3: curses.COLOR_WHITE,
            4: curses.COLOR_GREEN,
        }.get(color, curses.COLOR_YELLOW)


class FlowWindow:
    """Curses window to draw the call flow.

    Shows a scrollable list of pre-rendered flow lines, tracking the
    top-most visible line and the currently highlighted line.  Each line
    has a matching entry in ``line_infos`` describing which span of the
    line to colorize (filled elsewhere by the caller).
    """

    def __init__(self, screen, lines, start_line):
        # L_INFOS is shared with key_down(): total screen lines reserved
        # for the info panel below the flow.
        global L_INFOS
        L_INFOS = L_INFOS_WINDOW + L_EXTRA_LINES
        screen_size = screen.getmaxyx()
        self.lines = lines
        self.n_out_lines = len(self.lines)
        self.top_line_num = 0
        self.highlight_line_num = 0
        self.window_rows = screen_size[0]
        window_columns = screen_size[1]

        # logical window for line texts
        self.window = curses.newwin(
            self.window_rows, window_columns, start_line, 0
        )

        # create empty array for line infos
        # (needed to parse the line to colorize it)
        self.line_infos = []

    def show(self):
        """Redraw the visible slice of the flow, colorizing each line's
        marked span and highlighting the selected line."""
        self.window.clear()

        # only the lines between top and bottom fit on screen
        top = self.top_line_num
        bottom = self.top_line_num + self.window_rows
        for index, line in enumerate(self.lines[top:bottom]):

            line_info = self.line_infos[index + top]

            # try to colour inside the line
            if line_info["start_replace"] > 0 and line_info["end_replace"] > 0:

                # get colors information
                if CallidsBGColors.background_colorize:
                    # bgColor is an ANSI code string ("40"-based); -40
                    # converts it to a curses color number
                    if line_info["bgColor"]:
                        bg_color = int(line_info["bgColor"]) - 40
                    else:
                        bg_color = 0
                    fg_color = int(line_info["fgColor"])

                # absolute columns of the colorized span (offsets are
                # relative to the flow column, after the date column)
                first_section = SPAN_DATE + line_info["start_replace"] + 1
                second_section = SPAN_DATE + line_info["end_replace"] + 1
                replace_len = second_section - first_section

                # ASCII mode can't rely on box-drawing glyphs, so it
                # underlines the span instead of just bolding it
                if HORIZONTAL_LINE != "─":
                    underline = curses.A_UNDERLINE
                else:
                    underline = curses.A_BOLD

                # highlight current line
                if index != self.highlight_line_num:
                    # normal line: prefix, colorized span, suffix
                    self.window.addstr(index, 0, line[:first_section])
                    if CallidsBGColors.background_colorize:
                        color_pair_idx = (
                            CallidsBGColors.create_ncurses_color_pair(
                                fg_color, bg_color
                            )
                        )
                    else:
                        color_pair_idx = int(line_info["color_pair"])

                    self.window.addstr(
                        index,
                        first_section,
                        line[first_section : first_section + replace_len],
                        underline
                        | curses.A_BOLD
                        | curses.color_pair(color_pair_idx),
                    )

                    self.window.addstr(
                        index,
                        first_section + replace_len,
                        line[first_section + replace_len :],
                        curses.A_BOLD,
                    )
                else:
                    # highlighted line: whole line in color pair 1
                    self.window.addstr(
                        index,
                        0,
                        line[:first_section],
                        # curses.A_UNDERLINE |
                        curses.A_BOLD | curses.color_pair(1),
                    )

                    self.window.addstr(
                        index,
                        first_section,
                        line[first_section : first_section + replace_len],
                        underline | curses.A_BOLD | curses.color_pair(1),
                    )

                    self.window.addstr(
                        index,
                        first_section + replace_len,
                        line[first_section + replace_len :],
                        # curses.A_UNDERLINE |
                        curses.A_BOLD | curses.color_pair(1),
                    )
            else:
                # nothing to colorize on this line
                self.window.addstr(index, 0, line)

        self.window.refresh()

    def key_up(self):
        """move highlight up one line"""
        next_line_num = self.highlight_line_num - 1

        # paging: highlight already at the top — scroll the view instead
        if self.highlight_line_num == 0 and self.top_line_num != 0:
            self.top_line_num -= 1
            return

        # scroll highlight line
        if self.top_line_num != 0 or self.highlight_line_num != 0:
            self.highlight_line_num = next_line_num

    def key_down(self):
        """move highlight down one line"""
        global L_INFOS
        next_line_num = self.highlight_line_num + 1

        # paging: highlight hit the last usable row (window minus the
        # reserved info-panel rows) — scroll the view instead
        if (
            next_line_num == self.window_rows - L_INFOS
            and self.top_line_num + self.window_rows - L_INFOS
            != self.n_out_lines
        ):
            self.top_line_num += 1
            return

        # scroll highlight line (stop at the last output line)
        if (
            self.top_line_num + self.highlight_line_num + 1 != self.n_out_lines
            and self.highlight_line_num != self.window_rows
        ):
            self.highlight_line_num = next_line_num

    def page_down(self):
        """move highlight down one step (4 lines)"""
        for _ in range(4):
            self.key_down()

    def page_up(self):
        """move highlight up one step (4 lines)"""
        for _ in range(4):
            self.key_up()

class LogWindow:
    """Curses window that displays the currently highlighted log line."""

    def __init__(self, screen, text, start_line):
        # initscr() must run first: it populates curses.LINES / curses.COLS
        curses.initscr()
        rows, cols = curses.LINES, curses.COLS
        self.screen = screen
        self.content = text
        self.start_line = start_line
        self.window_rows = rows
        self.window_columns = cols
        # full-width logical window holding the log text
        self.window = curses.newwin(rows, cols, start_line, 0)
        self.window.addstr(0, 0, text)

    def show(self):
        """Flush the window content to the screen."""
        self.window.refresh()

    def update(self, text):
        """Replace the displayed text with *text*."""
        self.window.clear()
        self.window.addstr(0, 0, text)
        self.content = text

    def clear(self):
        """Blank the window."""
        self.window.erase()
        self.window.clear()

class Debugger:
    """
    A helper class that queries a predefined database and prints the fetched
    data as a debugging log.

    Attributes:
        callIdLog: str
            Space-joined log lines found for the specified Call-ID
        debugQueries: list of dict
            Known log lines with the SQL queries that explain them, plus
            the fetched responses (filled in by the constructor)
    Output
        The output of this class instance is generated by the __str__ method.
    """

    def __init__(self, callIdLog):
        self.callIdLog = " ".join(callIdLog)
        # Each entry maps a recognizable log line to the SQL queries that
        # reproduce the data the proxy loaded at that point.  The
        # {placeholders} are filled from ids extracted out of the log.
        self.debugQueries = [
            {
                "logline": "Load peer preferences for peer host {peerHostId}",
                "query": {
                    "requests": [
                        """select s.id, s.peer_host_id, p.attribute,
                                  s.value, s.modify_timestamp
                           from provisioning.voip_peer_preferences s,
                                provisioning.voip_preferences p
                           where p.id = s.attribute_id and
                                 s.peer_host_id = '{peerHostId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": "Checking PSTN peer group {peerGroupId}",
                "query": {
                    "requests": [
                        """select *
                           from provisioning.voip_peer_groups
                           where id = '{peerGroupId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": "Checking PSTN peer group {peerGroupId}",
                "query": {
                    "requests": [
                        """select *
                           from provisioning.voip_peer_hosts
                           where group_id = '{peerGroupId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": "Callee is local, uuid='«{subscriberId}»'",
                "query": {
                    "requests": [
                        """select *
                           from provisioning.voip_subscribers
                           where uuid = '{subscriberId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": "Load callee preferences for uuid {subscriberId}",
                "query": {
                    "requests": [
                        """select s.id as sub_id, s.username,
                                  p.attribute, up.value, up.modify_timestamp
                           from provisioning.voip_subscribers s,
                                provisioning.voip_usr_preferences up,
                                provisioning.voip_preferences p
                           where p.id = up.attribute_id and
                                 s.id = up.subscriber_id and
                                 s.uuid = '{subscriberId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": "Load caller preferences for uuid {subscriberId}",
                "query": {
                    "requests": [
                        """select *
                           from provisioning.voip_subscribers
                           where uuid = '{subscriberId}';""",
                        """select s.id as sub_id, s.username,
                                  p.attribute, up.value, up.modify_timestamp
                           from provisioning.voip_subscribers s,
                                provisioning.voip_usr_preferences up,
                                provisioning.voip_preferences p
                           where p.id = up.attribute_id and
                                 s.id = up.subscriber_id and
                                 s.uuid = '{subscriberId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": "domain preferences",
                "query": {
                    "requests": [
                        """select d.id as domain_id, d.domain,
                                  p.attribute, dp.value, dp.modify_timestamp
                           from provisioning.voip_domains d,
                                provisioning.voip_dom_preferences dp,
                                provisioning.voip_preferences p
                           where p.id = dp.attribute_id and
                                 d.id = dp.domain_id and
                                 d.domain = '192.168.1.154';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": (
                    "Load caller/callee contract preferences for "
                    "contract id '«{contractId}»'"
                ),
                "query": {
                    "requests": [
                        """select cp.id as contract_id,
                                  p.attribute, cp.value, cp.modify_timestamp
                           from provisioning.voip_contract_preferences cp,
                                provisioning.voip_preferences p
                           where p.id = cp.attribute_id and
                                 cp.contract_id = '{contractId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": (
                    "Load caller/callee reseller preferences for "
                    "reseller id '«{resellerId}»'"
                ),
                "query": {
                    "requests": [
                        """select r.id as reseller_id, r.name,
                                  p.attribute, rp.value, rp.modify_timestamp
                           from billing.resellers r,
                                provisioning.voip_reseller_preferences rp,
                                provisioning.voip_preferences p
                           where p.id = rp.attribute_id and
                                 r.id = rp.reseller_id and
                                 r.id = '{resellerId}';""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": (
                    "Applying $var(dpid_desc) rewrite rules on "
                    "$var(dp_input_desc) using dpid {dpId}"
                ),
                "query": {
                    "requests": [
                        """select s.id as set_id, s.name, s.description,
                                  r.id, r.match_pattern, r.replace_pattern,
                                  r.description, r.direction, r.field,
                                  r.priority
                           from provisioning.voip_rewrite_rule_sets s,
                                provisioning.voip_rewrite_rules r
                           where (s.caller_in_dpid = '{dpId}' or
                                  s.callee_in_dpid = '{dpId}' or
                                  s.caller_out_dpid = '{dpId}' or
                                  s.callee_out_dpid = '{dpId}' or
                                  s.caller_lnp_dpid = '{dpId}' or
                                  s.callee_lnp_dpid = '{dpId}') and
                                 s.id = r.set_id and
                                 r.enabled = 1
                           order by direction, field, priority;""",
                    ],
                    "responses": [],
                },
            },
            {
                "logline": (
                    "Loaded CF for map_id=0 type={typeId} uuid={subscriberId}"
                ),
                "query": {
                    "requests": [
                        """SELECT id, cf_type, uuid,
                                  source_name, source_mode,
                                  source_is_regex, source,
                                  destination_name, destination,
                                  priority, timeout, announcement_id,
                                  bnumber_name, bnumber_mode, bnumber_is_regex,
                                  bnumber, time_name,
                                  concat(if(year is null, '',
                                            concat('yr{{', year, '}} ')),
                                         if(month is null, '',
                                            concat('mo{{', month, '}} ')),
                                         if(mday is null, '',
                                            concat('md{{', mday, '}} ')),
                                         if(wday is null, '',
                                            concat('wd{{', wday, '}} ')),
                                         if(hour is null, '',
                                            concat('hr{{', hour, '}} ')),
                                         if(minute is null, '',
                                            concat('min{{', minute, '}} ')))
                                  as period
                           FROM provisioning.v_subscriber_cfs
                           WHERE cf_type = '{typeId}' AND
                                 uuid = '{subscriberId}'
                           ORDER BY priority ASC, id ASC, destination ASC;""",
                    ],
                    "responses": [],
                },
            },
        ]

        # Connection parameters, loaded from CONFIG_FILE
        self.dbConfig = {
            "host": "",
            "user": "",
            "password": "",
            "database": "",
        }
        # Entity ids extracted from the log; order matches reIds in
        # __get_query_ids()
        self.queryIds = {
            "subscriber": None,
            "peerHost": None,
            "peerGroup": None,
            "dp": None,
            "reseller": None,
            "contract": None,
            "type": None,
        }
        self.__get_query_ids()
        self.__load_database_configuration()
        self.__fetch_query_responses()

    def __str__(self):
        return self.__stringify_debug_object()

    def __get_query_ids(self):
        """Extract known entity ids from the log via the RE_* patterns."""
        reIds = [
            RE_SUBSCRIBER_UUID,
            RE_PEER_HOST_ID,
            RE_PEER_GROUP_ID,
            RE_DP_ID,
            RE_RESELLER_ID,
            RE_CONTRACT_ID,
            RE_TYPE_ID,
        ]
        # queryIds preserves insertion order, which matches reIds order
        for idKey, reId in zip(self.queryIds, reIds):
            uri_match = re.search(reId, self.callIdLog)
            if uri_match:
                self.queryIds[idKey] = uri_match.group(1)

    def __load_database_configuration(self):
        """Read mysql_* settings (key=value lines) from CONFIG_FILE."""
        with open(CONFIG_FILE, "r", encoding="UTF-8") as configuration:
            for line in configuration:
                if "mysql_host" in line:
                    self.dbConfig["host"] = line.rstrip().split("=")[-1]
                elif "mysql_user" in line:
                    self.dbConfig["user"] = line.rstrip().split("=")[-1]
                elif "mysql_password" in line:
                    self.dbConfig["password"] = line.rstrip().split("=")[-1]
                elif "mysql_database" in line:
                    self.dbConfig["database"] = line.rstrip().split("=")[-1]
                else:
                    continue

    def __open_database_connection(self):
        """Open a pymysql connection using the loaded configuration."""
        return pymysql.connect(
            host=self.dbConfig["host"],
            user=self.dbConfig["user"],
            password=self.dbConfig["password"],
            database=self.dbConfig["database"],
            cursorclass=pymysql.cursors.DictCursor,
        )

    @staticmethod
    def __stringify_database_response(response):
        """Render a fetchall() result as a pretty-printed table."""
        if response:
            return tabulate(response, headers="keys", tablefmt="pretty")
        return "NO RECORDS FOUND"

    def __query_database(self, query):
        """Run *query* and return its result as a printable string.

        On connection failure, report the error and return the error
        text so callers can still concatenate the responses.
        """
        try:
            connection = self.__open_database_connection()
        except pymysql.Error as e:
            # BUGFIX: previously this fell through to a `finally` block
            # that used the unbound `connection`, raising NameError (and
            # returning None, which later broke __stringify_debug_object).
            sys.tracebacklimit = 0
            print(f"ERROR: {str(e.args[1])}")
            return f"ERROR: {str(e.args[1])}"
        with connection:
            with connection.cursor() as cursor:
                cursor.execute(query)
                response = cursor.fetchall()
                return self.__stringify_database_response(response)

    def __format_query_string(self, obj):
        """Fill every {placeholder} in *obj* from the extracted ids."""
        return obj.format(
            subscriberId=self.queryIds["subscriber"],
            peerHostId=self.queryIds["peerHost"],
            peerGroupId=self.queryIds["peerGroup"],
            dpId=self.queryIds["dp"],
            resellerId=self.queryIds["reseller"],
            contractId=self.queryIds["contract"],
            typeId=self.queryIds["type"],
        )

    def __format_debug_queries(self, obj):
        """Recursively format all strings in *obj* in place."""
        if isinstance(obj, str):
            return self.__format_query_string(obj)
        if isinstance(obj, dict):
            for k, v in obj.items():
                obj[k] = self.__format_debug_queries(v)
        if isinstance(obj, list):
            for i, item in enumerate(obj):
                obj[i] = self.__format_debug_queries(item)

        return obj

    def __fetch_query_responses(self):
        """Run every request and store its stringified response."""
        self.__format_debug_queries(self.debugQueries)
        for queryObj in self.debugQueries:
            for request in queryObj["query"]["requests"]:
                queryObj["query"]["responses"].append(
                    self.__query_database(request)
                )
        return self.debugQueries

    def __stringify_debug_object(self):
        """Build the final report, skipping entries whose placeholders
        stayed unresolved (their logline still contains "None")."""
        output = ""
        for queryObj in self.debugQueries:
            if "None" not in queryObj["logline"]:
                output += ">>> " + queryObj["logline"] + "\n"
                for response in queryObj["query"]["responses"]:
                    output += response + "\n"
                output += "\n"
        return output


def ssh_check_file(locations, file_path):
    """Return True if *file_path* exists on a matching host in *locations*.

    Only hosts whose node types (per get_type_node) appear in the file
    path are probed; the last probed host's answer wins.
    """
    lines = []
    # NOTE(review): `[[ ... ]]` is a bash-ism — assumes the remote login
    # shell is bash; confirm for non-bash targets.
    command = "[[ -f {} ]] && printf ok".format(file_path)
    for location in locations:
        node_types = get_type_node([location]) or []
        if any(node_type in file_path for node_type in node_types):
            ssh.connect(location, pkey=key, timeout=2)
            _, ssh_stdout, _ = ssh.exec_command(command)
            # block until the remote command finishes before reading
            ssh_stdout.channel.recv_exit_status()
            lines = ssh_stdout.readlines()
            ssh.close()
    return len(lines) > 0 and lines[0] == "ok"


def ssh_get_nb_lines(locations, file_path):
    """Return the largest line count of *file_path* across *locations*.

    Runs ``wc -l`` remotely on every node and returns the maximum of
    the reported counts.  Returns None (after printing a hint) when the
    SSH key is rejected.
    """
    remote_file = []
    try:
        for location in locations:
            ssh.connect(location, pkey=key, timeout=2)
            _, ssh_stdout, _ = ssh.exec_command("wc -l < {}".format(file_path))
            remote_file += ssh_stdout
            ssh.close()
        return max(int(t) for t in remote_file)
    except paramiko.ssh_exception.AuthenticationException:
        print("use a valid private key")
    except paramiko.ssh_exception.SSHException:
        print("use a valid public key")

def ssh_get_file_size(locations, file_path):
    """Return the raw ``wc -c`` output lines, one per location.

    Note the entries are the strings printed by ``wc -c``, not ints;
    callers are expected to convert.  Returns None (after printing a
    hint) when the SSH key is rejected.
    """
    remote_file = []
    try:
        for location in locations:
            ssh.connect(location, pkey=key, timeout=2)
            _, ssh_stdout, _ = ssh.exec_command("wc -c < {}".format(file_path))
            remote_file += ssh_stdout
            ssh.close()
        return remote_file
    except paramiko.ssh_exception.AuthenticationException:
        print("use a valid private key")


def ssh_get_file(
    locations, file_path, callid="", start=0, size=-1, max_nb_line=-1
):
    """Fetch (filtered) log lines of *file_path* from every location.

    When *callid* is non-empty the remote file is grepped for it; the
    optional *start*/*size* (byte window) or *start*/*max_nb_line*
    (line window) parameters narrow the output.  With an empty
    *callid* the whole file is cat'ed.  Returns the concatenated
    stdout lines from all nodes, or None on authentication failure.

    NOTE(review): *callid* and *file_path* are interpolated into a
    remote shell command without quoting/escaping — a value containing
    a single quote breaks (or injects into) the command; confirm the
    inputs are trusted.
    """
    remote_file = []
    try:
        for location in locations:
            ssh.connect(location, pkey=key, timeout=2)
            ssh_stdout = None
            command = ""
            if len(callid) > 0:
                if file_path.endswith(".gz"):
                    # Decompress into a remote temp file, then grep it.
                    # NOTE(review): $tfile is never removed afterwards —
                    # this leaks a temp file on the remote node.
                    command = (
                        "tfile=$(mktemp) && gzip --keep -c -d {} > $tfile &&"
                        " grep $tfile -e '{}'".format(file_path, callid)
                    )
                elif (
                    not location == "localhost"
                    and size < 0
                    and max_nb_line < 0
                ):
                    # Rewrite loopback addresses to the node name so the
                    # flow shows which host produced the line.
                    command = (
                        "grep {} -e '{}' | sed -e 's/127.0.0.1/{}/'".format(
                            file_path, callid, location
                        )
                    )
                elif size >= 0:
                    # Byte window: skip `start` bytes from the end offset,
                    # then keep `size` bytes.
                    command = (
                        "grep {} -e '{}' | tail -c $(( $(wc -c < {}) - {} )) |"
                        " head -c {}".format(
                            file_path, callid, file_path, start, size
                        )
                    )
                elif max_nb_line >= 0:
                    # Line window: lines [start, start + max_nb_line).
                    command = "grep {} -e '{}' | tail -n+{} | head -{}".format(
                        file_path, callid, start, max_nb_line
                    )
                else:
                    command = "grep {} -e '{}'".format(file_path, callid)
            else:
                command = "cat {}".format(file_path)
            _, ssh_stdout, _ = ssh.exec_command(command)
            remote_file += ssh_stdout
            ssh.close()
        return remote_file
    except paramiko.ssh_exception.AuthenticationException:
        print("use a valid private key")


def get_remote_nodes():
    """Return the neighbour node names from /etc/default/ngcp-roles.

    Parses the ``NGCP_NEIGHBOURS="a b c"`` line into a list of names.
    Returns an empty list when the variable is absent (previously this
    raised NameError on the unbound local).
    """
    neighbours = []
    with open("/etc/default/ngcp-roles") as fp:
        for line in fp:
            if "NGCP_NEIGHBOURS=" in line:
                neighbours = (
                    line.split("=")[1].strip().replace('"', "").split(" ")
                )
    return neighbours


def get_type_node(location):
    """Return the lowercase role names configured on *location*.

    Fetches /etc/default/ngcp-roles from the node and extracts every
    ``NGCP_IS_<ROLE>=yes`` entry, mapping "mgmt" to "websocket".
    Returns None when the roles file cannot be retrieved.
    """
    try:
        roles = ssh_get_file(location, "/etc/default/ngcp-roles")
        names = []
        for entry in roles:
            if "NGCP_IS_" not in entry or "yes" not in entry:
                continue
            role = entry.replace("NGCP_IS_", "").split("=")[0].lower()
            names.append(role.replace("mgmt", "websocket"))
        return names
    except paramiko.ssh_exception.SSHException:
        print(" /!\\ Couldn't get roles on node {} ".format(location))


def load_ips_ports(verbose, config):
    """Load the current ips and ports of system services.

    Populates the global ``system_ip`` mapping (address -> service
    name) and the global ``ports_ranges`` dict for every name listed
    in ``ports_limits``.  Exits with status 2 when the configuration
    file does not exist.
    """
    global ports_ranges

    ports_ranges = {
        "b2b_port_min": 0,
        "b2b_port_max": 0,
        "asterisk_port_min": 0,
        "asterisk_port_max": 0,
        "rtp_port_min": 0,
        "rtp_port_max": 0,
    }
    config_file = CONFIG_FILE if not config else config

    if not os.path.isfile(config_file):
        # Report the path actually checked: `config` may be None here
        # when the default CONFIG_FILE was used.
        print("File %s does not exist" % config_file)
        sys.exit(2)

    with open(config_file, "r", encoding="UTF-8") as configuration:
        for line in configuration:
            if line.startswith("#"):
                continue
            line = line.rstrip()
            if not line:
                continue
            name, address = line.split("=")
            system_ip[address] = name
            # Remember the port-range limits used for flow headers.
            if name in ports_limits:
                ports_ranges[name] = address

    if verbose:
        print("ngcp-log-flow > System IPs and Ports: %s" % system_ip)


def prepare_file(verbose, service, date, custom_file, call_id):
    """Locate the log file to use and decompress it if needed.

    Resolves the log file from the service name (or takes
    *custom_file* / an archived *date* file), verifies it exists on
    the relevant nodes, and extracts gzip archives for localhost into
    a temp file.  Returns ``(file_path, compressed)``.
    """
    compressed = False
    service = service.replace("websocket", "ws")
    service_logs = {"proxy": PRX_LOG, "ws": WS_LOG, "rtp": RTP_LOG}
    service_log = service_logs.get(service, LB_LOG)

    if custom_file:
        if not os.path.isfile(custom_file):
            if "/" not in custom_file:
                sys.exit(
                    f"ERROR: Please provide an absolute path for "
                    f"the custom file: {custom_file}"
                )
            sys.exit(f"ERROR: File {custom_file} does not exist")
        file_path = custom_file
    elif date:
        # Name now contains an unknown datetime that prevents to know
        # directly the name for the file
        candidates = glob.glob(
            "%s/%s-%s-*.gz" % (NGCP_OLD_LOGS_DIR, service_log, date)
        )
        file_path = next(iter(candidates), "")
    else:
        file_path = "%s/%s" % (NGCP_LOGS_DIR, service_log)

    locations = file_locations[service_log]
    if not custom_file and not ssh_check_file(locations, file_path):
        print("File %s does not exist" % file_path)
        sys.exit(2)

    if "localhost" in locations and file_path.endswith(".gz"):
        file_path = decompress_gzip(locations, file_path, call_id)
        # Register the extracted temp file as a localhost-only source.
        file_locations.update({f"{file_path.split('/')[-1]}": ["localhost"]})
        compressed = True
        if verbose:
            print("ngcp-log-flow > Extracted gzip in tmp file %s" % file_path)

    return file_path, compressed


def decompress_gzip(locations, compressed_file, call_id):
    """Fetch and extract a remote gzip log into a local temp file.

    The remote side does the decompression (via ssh_get_file's gz
    branch); the filtered lines are written to a fresh temp file whose
    path is returned.  Exits with status 5 on failure, removing the
    partial temp file.
    """
    try:
        file_descriptor, text_file = tempfile.mkstemp()
    except IOError:
        print("Not able to open a temporary file")
        sys.exit(5)

    try:
        remote_lines = ssh_get_file(locations, compressed_file, callid=call_id)
        buffer = io.BytesIO("\n".join(remote_lines).encode("utf-8"))
        with os.fdopen(file_descriptor, "wb") as f_out:
            shutil.copyfileobj(buffer, f_out)
    except (IOError, KeyboardInterrupt):
        if os.path.isfile(text_file):
            os.remove(text_file)
        sys.exit(5)

    return text_file


def getchuncks_file(file_object, size=CHUNK_SIZE):
    """Yield ``(start, length)`` spans covering *file_object*.

    Each span is *size* bytes extended to the end of the current line,
    so no line is ever split between two chunks.  The generator stops
    after the span that reaches end of file.
    """
    while True:
        chunk_start = file_object.tell()
        file_object.seek(size, 1)
        boundary_line = file_object.readline()
        yield chunk_start, file_object.tell() - chunk_start
        if not boundary_line:
            break


def getchunks(file_to_use, size=CHUNK_SIZE, callid=""):
    """Split the file in chunks of size CHUNK_SIZE"""
    locations = file_locations[file_to_use.split("/")[-1]]

    try:
        remote_lines = ssh_get_file(locations, file_to_use, callid)
        stream = io.BytesIO("\n".join(remote_lines).encode("utf-8"))
        yield from getchuncks_file(stream, size)
    except Exception:
        # Best-effort: report the failure and end the generator.
        print(" >err>> getchunks {}".format(file_to_use))


def parser_process_file(file_object, call_id, temp_list, verbose, ret_obj):
    """Collect every line of *file_object* containing *call_id*.

    Matching lines are appended to *temp_list* and registered with
    CallidsBGColors for Call-ID extraction.  Fills *ret_obj* with the
    matched lines ("temp_list") and the accumulated Call-ID set
    ("callid_set"), then returns it.
    """
    # Iterate lazily instead of materializing readlines().
    for line in file_object:
        if isinstance(line, bytes):
            line = line.decode()
        if call_id in line:
            temp_list.append(line)
            # the line contains a callid so
            # I pass the line to CallidsBGColors
            # for callid extraction and storing
            CallidsBGColors.add_call_id_to_set(line)
    if verbose:
        print("ngcp-log-flow > Process concluded")

    ret_obj["temp_list"] = temp_list
    ret_obj["callid_set"] = CallidsBGColors.callIdSet
    return ret_obj


def parser_process(verbose, file_to_use, chunk, call_id):
    """Read all call id entries from file"""
    # return dict with lines to print and callids found
    ret_obj = {"temp_list": [], "callid_set": []}

    if verbose:
        print("ngcp-log-flow > Process started")

    location = file_locations[file_to_use.split("/")[-1]]

    try:
        remote_lines = ssh_get_file(
            location, file_to_use, call_id, start=chunk[0], size=CHUNK_SIZE
        )
        stream = io.BytesIO("\n".join(remote_lines).encode("utf-8"))
        return parser_process_file(stream, call_id, [], verbose, ret_obj)
    except Exception:
        print(" >err>> parser_process {} {}".format(file_to_use, chunk))


def parser_process_lines(verbose, file_to_use, chunk, call_id, service_file):
    """Read all call id entries from file.

    Like parser_process() but limits the remote fetch to NB_LINES
    lines starting at ``chunk[0]``.  *service_file* selects the
    file_locations entry; when falsy, the basename of *file_to_use*
    is used instead.  Returns a dict with the matched lines and
    Call-IDs, or None on failure.
    """
    # return dict with lines to print and callids found
    ret_obj = {"temp_list": [], "callid_set": []}

    if verbose:
        print("ngcp-log-flow > Process started")
    temp_list = []

    if not service_file:
        service_file = file_to_use.split("/")[-1]

    location = file_locations[service_file]

    try:
        file_object = ssh_get_file(
            location,
            file_to_use,
            call_id,
            start=chunk[0],
            max_nb_line=NB_LINES,
        )
        file_object = io.BytesIO(bytes("\n".join(file_object), "utf-8"))
        return parser_process_file(
            file_object, call_id, temp_list, verbose, ret_obj
        )
    except Exception:
        # Label fixed: previously reported "parser_process", which made
        # failures of the two functions indistinguishable in the output.
        print(
            " >err>> parser_process_lines {} {}".format(file_to_use, chunk)
        )


def uri_parse(uri, dictionary, ips):
    """Parse the uri"""
    address_check = ""
    uri = uri.strip(",").strip("'").replace("«", "").replace("»", "")

    # Look for an IPv4 address and Look for an IPv6 address
    # It is possible that the IPv6 is written with a different notation
    # probably better to use a library that convert them in the same format
    uri_match = re.search(RE_URI_PORTv4, uri) or re.search(RE_URI_PORTv6, uri)
    if uri_match:
        ip_part = uri_match.group(1)
        port_part = uri_match.group(2)
        dictionary["ip"] = ip_part
        dictionary["port"] = port_part
        address_check = port_part if COMPACT else ip_part + ":" + port_part
    else:
        uri_match = re.search(RE_URI_IPv4, uri) or re.search(RE_URI_IPv6, uri)
        if uri_match:
            ip_part = uri_match.group(1)
            dictionary["ip"] = ip_part
            dictionary["port"] = "5060"
            address_check = "5060" if COMPACT else ip_part + ":5060"

    # NOTE(review): when nothing matches, address_check stays "" and an
    # empty string is appended to ips — downstream code appears to rely
    # on the current behavior; confirm before changing.
    if address_check not in ips:
        ips.append(address_check)


def log_parser(admin, _service, callid_findings, print_log, debug, call_id):
    """Parse the log line

    Turns the raw log lines in *callid_findings* into a list of dicts
    (date, method, code, source/destination addresses, flags and the
    full line) plus the list of involved addresses, per service type
    (kamailio proxy/lb, rtpengine, websocket).

    admin:            when False, GDPR-sensitive data is masked in the
                      printed output.
    _service:         fallback service name when a line carries none.
    callid_findings:  raw matched log lines.
    print_log:        echo each line (and, with debug, the Debugger
                      output) to stdout.
    debug:            fetch and print database debugging info.
    call_id:          unused here; kept for interface compatibility.

    Returns (log_dictionary, involved_ips).
    """
    log_dictionary = []
    involved_ips = []
    # rtpengine reports media/protocol on a line preceding the SSRC
    # line; this carries them over to the next matching line.
    line_media_prot = {"media": "", "protocole": ""}

    for line in callid_findings:
        # Lines may be prefixed with the service name; detect and strip it.
        service = next(
            iter([x for x in list_services if line.find(x) == 0]), _service
        )
        if service in list_services and line.find(service) == 0:
            line = line[len(service) + 1 :]

        if print_log:
            if admin:
                print(line)
            else:
                print(GDPR_DATA.sub("XXXX", line))

        # Prepare the dictionary
        line_dict = {
            "date": "",
            "method": "",
            "code": "",
            "from": "",
            "to": "",
            "from_ip": {
                "protocol": "",
                "ip": "",
                "port": "",
            },
            "to_ip": {
                "protocol": "",
                "ip": "",
                "port": "",
            },
            "du": {
                "protocol": "",
                "ip": "",
                "port": "",
            },
            "fs": {
                "protocol": "",
                "ip": "",
                "port": "",
            },
            "relaying": False,
            "reply": False,
            "full_line": line,
            "media": "",
            "protocole": "",
            "packets": "",
            "errors": "",
            "_service": service,
        }

        # Kamailo Proxy or Kamailio LB
        if service in ("proxy", "lb"):
            # Skip all the lines that are not NOTICE because not interesting
            if "NOTICE:" not in line:
                continue

            # Date
            match = re.search(RE_DATE, line)
            if match:
                line_dict["date"] = match.group(1)

            # Method
            match = re.search(RE_M, line)
            if match:
                line_dict["method"] = match.group(1)

            # Code
            match = re.search(RE_S, line)
            if match:
                line_dict["code"] = match.group(1)

            # Relaying
            if "Relaying" in line:
                line_dict["relaying"] = True

            # Sending reply
            if "Sending reply" in line:
                line_dict["relaying"] = True

            # Reply
            if "Reply" in line:
                line_dict["reply"] = True

            # source IP
            if service == "proxy":
                match = re.search(RE_PRX_IP, line)
            else:
                match = re.search(RE_LB_IP, line)
            if match:
                uri_parse(match.group(1), line_dict["from_ip"], involved_ips)

            # source URI
            match = re.search(RE_FS, line)
            if match:
                uri_parse(match.group(1), line_dict["from_ip"], involved_ips)

            # destination IP
            match = re.search(RE_DESTIP, line)
            if match:
                uri_parse(match.group(1), line_dict["to_ip"], involved_ips)

            # destination URI
            match = re.search(RE_DU, line)
            if match:
                uri_parse(match.group(1), line_dict["to_ip"], involved_ips)

        # RTPEngine
        elif service == "rtp":
            # Skip all the lines that are not interesting
            if (
                (", SSRC " not in line and " Media #" not in line)
                or ("(RTCP)" in line)
                or (", SSRC «0»," in line)
            ):
                continue

            # Date
            match = re.search(RE_DATE, line)
            if match:
                line_dict["date"] = match.group(1)

            # source IP
            match = re.search(RE_CALLER_IP, line)
            if match:
                uri_parse(match.group(1), line_dict["from_ip"], involved_ips)
                # Consume the media/protocol remembered from the
                # preceding "Media #" line.
                line_dict["media"] = line_media_prot["media"]
                line_media_prot["media"] = ""
                line_dict["protocole"] = line_media_prot["protocole"]
                line_media_prot["protocole"] = ""

            # source URI
            match = re.search(RE_FS, line)
            if match:
                uri_parse(match.group(1), line_dict["fs"], involved_ips)

            # dest IP
            match = re.search(RE_CALLEE_IP, line)
            if match:
                uri_parse(match.group(1), line_dict["to_ip"], involved_ips)

            # destination URI
            match = re.search(RE_DU, line)
            if match:
                uri_parse(match.group(1), line_dict["du"], involved_ips)

            # media
            match = re.search(RE_MEDIA, line)
            if match:
                line_media_prot["media"] = match.group(1)

            # protocole
            match = re.search(RE_PROTOCOLE, line)
            if match:
                line_media_prot["protocole"] = match.group(1)

            # packets and errors number
            # Neutralize the closing guillemet so the packets/errors
            # regexes do not stop at it.
            line1 = line.replace("», SSRC", "*, SSRC")
            match = re.search(RE_PACKETS, line1)
            if match:
                line_dict["packets"] = match.group(1)

            match = re.search(RE_ERRORS, line1)
            if match:
                line_dict["errors"] = match.group(1)

        # WebSocket
        elif service == "ws":
            if (
                ": debug " in line
                or "Remove completed call" in line
                or "info  Route event: " in line
                or "info  Discard " in line
                or "info  Request " in line
            ):
                continue

            # Date
            match = re.search(RE_DATE, line)
            if match:
                line_dict["date"] = match.group(1)

            # Code
            match = re.search(RE_WS_CODE, line)
            if match:
                line_dict["code"] = match.group(1)

            # Relaying
            if "Relaying" in line:
                line_dict["relaying"] = True

            # Sending reply
            if "Sending reply" in line:
                line_dict["relaying"] = True

            # Reply
            # NOTE: `match` set here intentionally carries over to the
            # "Method" check below — do not reorder these branches.
            if "Reply" in line:
                line_dict["reply"] = True
                match = re.search(RE_WS_NAME, line)
            elif (
                "info  CSTA XML request" in line or "Dispatch to Redis" in line
            ):
                match = re.search(RE_WS_NAME, line)
            else:
                match = re.search(RE_WS_STATE_STATUS, line)

            # Method
            if match:
                line_dict["method"] = match.group(1)

            # Source
            match = re.search(RE_WS_CHANNEL, line)
            if match:
                line_dict["from"] = match.group(1)
            elif "info  CSTA XML request" in line:
                match = re.search(RE_WS_DEVICE, line)
                if match:
                    line_dict["from"] = match.group(1)
                else:
                    line_dict["from"] = "3PCC"
            else:
                line_dict["from"] = "WebSocket"

            # An explicit From header overrides the channel/device guess.
            match = re.search(RE_WS_FROM, line)
            if match:
                line_dict["from"] = match.group(1)

            # Destination
            match = re.search(RE_WS_TO, line)
            if match:
                line_dict["to"] = match.group(1)

            if "Dispatch to Redis" in line:
                match = re.search(RE_WS_QUEUE, line)
            else:
                match = re.search(RE_WS_TO, line)

            if match:
                line_dict["to"] = match.group(1)
            elif "info  Reply:" in line:
                match = re.search(RE_WS_DEVICE, line)
                if match:
                    line_dict["to"] = match.group(1)
                else:
                    line_dict["to"] = "3PCC"
            else:
                line_dict["to"] = "WebSocket"

            if line_dict["from"] not in involved_ips:
                involved_ips.append(line_dict["from"])

            if line_dict["to"] not in involved_ips:
                involved_ips.append(line_dict["to"])

        # Drop lines that yielded no address information at all.
        is_empty = (
            not line_dict["from_ip"]["ip"] and not line_dict["to_ip"]["ip"]
        )
        is_empty = (
            not line_dict["du"]["ip"]
            and not line_dict["fs"]["ip"]
            and is_empty
        )
        is_empty = not line_dict["from"] and not line_dict["to"] and is_empty

        if is_empty:
            continue

        log_dictionary.append(line_dict)

    if print_log and debug:
        print("\n\nFetching Debugging Logs\n")
        print(Debugger(callid_findings))

    if print_log:
        print("\n\n")

    return log_dictionary, involved_ips


def process_data(_service, verbose, log_parser_result, involved_ips):
    """Process the log information.

    Converts each parsed log entry into a printable flow line: for
    every involved address a flow glyph (arrow, stop, tag, …) is
    chosen depending on whether the address is the entry's source or
    destination.  Returns the list of flow-line dicts.
    """
    if verbose:
        print("Processing data")

    output_lines = []
    last_info = ""

    for element in log_parser_result:

        service = element["_service"] or _service

        temp_line = {
            "datetime": "",
            "code": "",
            "flow": [],
            "media": [],
            "protocole": [],
            "packets": [],
            "errors": [],
            "callId": "",
            "_service": service,
        }

        # In compact mode columns are identified by port only.
        if COMPACT:
            ip_port_from = "{}".format(element["from_ip"]["port"])
            ip_port_to = "{}".format(element["to_ip"]["port"])
            ip_port_fs = "{}".format(element["fs"]["port"])
            ip_port_du = "{}".format(element["du"]["port"])
        else:
            ip_port_from = "{}:{}".format(
                element["from_ip"]["ip"], element["from_ip"]["port"]
            )
            ip_port_to = "{}:{}".format(
                element["to_ip"]["ip"], element["to_ip"]["port"]
            )
            ip_port_fs = "{}:{}".format(
                element["fs"]["ip"], element["fs"]["port"]
            )
            ip_port_du = "{}:{}".format(
                element["du"]["ip"], element["du"]["port"]
            )

        src_find = False
        dest_find = False
        current_line = []

        # Walk the address columns left to right and pick a glyph for
        # each, depending on where source and destination sit.
        for address in involved_ips:
            if address in (
                ip_port_from,
                ip_port_fs,
                element["from"],
            ) and address in (
                ip_port_to,
                ip_port_du,
                element["to"],
            ):
                # Source and destination on the same column.
                src_find = True
                dest_find = True
                current_line.append(LOCAL_FLOW)
            elif address in (ip_port_from, ip_port_fs, element["from"]):
                src_find = True
                if dest_find:
                    current_line.append(STOP_LEFT)
                else:
                    if service == "rtp":
                        current_line.append(ARROW_RIGHT)
                    else:
                        current_line.append(STOP_RIGHT)
            elif address in (ip_port_to, ip_port_du, element["to"]):
                dest_find = True
                if src_find:
                    current_line.append(ARROW_LEFT)
                else:
                    current_line.append(ARROW_RIGHT)
            else:
                if (src_find and dest_find) or (
                    not src_find and not dest_find
                ):
                    current_line.append(TAG_FLOW)
                else:
                    # Column sits between source and destination.
                    current_line.append(FULL_FLOW)

        if src_find and dest_find:
            # OK
            temp_line["flow"] = current_line
        elif not src_find and not dest_find:
            # If both source and destination are missing
            temp_line["flow"] = ["Missing all info"]
        elif not src_find:
            # If source is missing
            temp_line["flow"] = ["Missing source info"]
        elif not dest_find:
            # If destination is missing
            temp_line["flow"] = ["Missing destination info"]

        temp_line["datetime"] = element["date"]

        # Prefer the response code, then the method; a relay line with
        # neither inherits the previous line's label.
        if element["code"] != "":
            temp_line["code"] = element["code"]
        elif element["method"] != "":
            temp_line["code"] = element["method"]
        elif element["relaying"] and last_info != "":
            temp_line["code"] = last_info
        last_info = temp_line["code"]
        temp_line["method"] = element["method"]

        # added for RTP Engine service (single branch: the original had
        # two consecutive identical `if service == "rtp"` checks)
        if service == "rtp":
            element["full_line"] = element["full_line"].replace(
                "INFO: [", "ID="
            )
            if (element["packets"] != "") or (element["errors"] != ""):
                temp_line["flow"] += ["media"]
                temp_line["flow"] += (": ", element["media"])
                temp_line["flow"] += [", prot"]
                temp_line["flow"] += (": ", element["protocole"])
                temp_line["flow"] += [", packets"]
                temp_line["flow"] += (": ", element["packets"])
                temp_line["flow"] += [", errors"]
                temp_line["flow"] += (": ", element["errors"])

            if temp_line["flow"] == []:
                # Nothing to draw for this rtp line; drop it.
                continue

        # add callId to the structure - needed for coloring BG
        temp_line["callId"] = CallidsBGColors.get_call_id_from_log_line(
            element["full_line"]
        )

        output_lines.append(temp_line)

    return output_lines


def interactive_screen(
    admin, _service, output_lines, involved_ips, log_parser_result
):
    """Generates the interactive screen.

    Builds a curses UI with a scrollable flow window and a log-line
    window.  Keys: arrows / PgUp / PgDn navigate, '9'/'0' resize the
    log window, 'q' quits.  The curses state is always restored on
    exit, including on Ctrl-C.
    """
    global L_INFOS
    # get the curses screen window
    screen = curses.initscr()

    # current screen size - rows: size[0], columns: size[1]
    screen_size = screen.getmaxyx()

    # prepare colors
    curses.start_color()

    # Hilight color is YELLOW
    curses.init_pair(1, curses.COLOR_YELLOW, curses.COLOR_BLACK)

    curses.init_pair(2, curses.COLOR_MAGENTA, curses.COLOR_BLACK)

    # colors for SIP requests and responses

    # error responses color in interactive mode
    curses.init_pair(5, curses.COLOR_RED, curses.COLOR_BLACK)

    # provisional responses color in interactive mode
    curses.init_pair(4, curses.COLOR_CYAN, curses.COLOR_BLACK)

    # ok responses color in interactive mode
    curses.init_pair(3, curses.COLOR_GREEN, curses.COLOR_BLACK)

    # turn off input echoing
    curses.noecho()

    # make cursor invisible
    curses.curs_set(0)

    # respond to keys immediately (don't wait for enter)
    curses.cbreak()

    # map arrow keys to special values
    screen.keypad(True)

    # preparing flow strings to print
    print_lines = []

    # data for colorizing and parse the lines
    print_lines_infos = []

    # Robustness fix: `service` was unbound when output_lines is empty,
    # crashing at the header print below.
    service = _service

    for output_line in output_lines:
        service = output_line["_service"] or _service
        flow_line = output_line["datetime"].center(SPAN_DATE, " ")

        flow_line += " ".center(SPAN_CODE, " ")

        full_flow_line = ""
        for element in output_line["flow"]:
            if service == "rtp":
                full_flow_line += element
            else:
                full_flow_line += element.center(SPAN_FLOW, " ")

        retobj = insert_flowline_info(
            service, full_flow_line, output_line, False
        )

        flow_line += retobj["new_flow_line"]

        print_lines.append(flow_line)
        print_lines_infos.append(retobj)

        if len(flow_line) > (screen_size[1] - 2):
            # shut down cleanly
            curses.nocbreak()
            screen.keypad(0)
            curses.echo()
            curses.endwin()
            print("\nError: screen size too small to print interactive flow")
            print("Try removing '-i' from the executed command\n")
            sys.exit(6)

    log_num = 0

    def _log_text():
        # Admin view (and ws logs) shows the raw line; otherwise
        # GDPR-sensitive data is masked.  Reads log_num/service from
        # the enclosing scope.  (Was duplicated five times before.)
        line = log_parser_result[log_num]["full_line"]
        if admin or service == "ws":
            return line
        return GDPR_DATA.sub("XXXX", line)

    try:
        # print header and footer
        print_out_interactive_header(admin, service, involved_ips, screen, 1)
        screen.refresh()

        # initialize and print the initial flow
        flow_window = FlowWindow(screen, print_lines, 4)

        # information for colorizing
        flow_window.line_infos = print_lines_infos

        flow_window.show()

        # initialize and print the initial log line
        log_to_print = _log_text()

        start_line = curses.LINES - L_INFOS_WINDOW
        log_window = LogWindow(screen, log_to_print, start_line)
        log_window.show()

        print_out_interactive_footer(screen)

        while True:
            char = screen.getch()
            if char == ord("q"):
                break

            if char == curses.KEY_UP:
                if log_num > 0:
                    log_num -= 1
                flow_window.key_up()
                flow_window.show()
                log_to_print = _log_text()
                log_window.update(log_to_print)
                log_window.show()
                print_out_interactive_footer(screen)

            elif char == curses.KEY_DOWN:
                if log_num < len(log_parser_result) - 1:
                    log_num += 1
                flow_window.key_down()
                flow_window.show()
                log_to_print = _log_text()
                log_window.update(log_to_print)
                log_window.show()
                print_out_interactive_footer(screen)

            # manage page down keypress
            elif char == curses.KEY_NPAGE:
                step = 4
                if log_num < len(log_parser_result) - step:
                    log_num += step
                flow_window.page_down()
                flow_window.show()
                log_to_print = _log_text()
                log_window.update(log_to_print)
                log_window.show()
                print_out_interactive_footer(screen)

            # manage page up keypress
            elif char == curses.KEY_PPAGE:
                if log_num > 4:
                    log_num -= 4
                flow_window.page_up()
                flow_window.show()
                log_to_print = _log_text()
                log_window.update(log_to_print)
                log_window.show()
                print_out_interactive_footer(screen)

            # manage '9' keypress (to increase log_window size)
            elif char == ord("9"):
                if start_line <= L_EXTRA_LINES:
                    continue
                start_line = start_line - 1
                L_INFOS = L_INFOS + 1
                log_window = LogWindow(screen, log_to_print, start_line)
                log_window.show()
                print_out_interactive_footer(screen)

            # manage '0' keypress (to decrease log_window size)
            elif char == ord("0"):
                if start_line >= curses.LINES:
                    continue
                log_window.clear()
                del log_window
                flow_window.show()
                start_line = start_line + 1
                L_INFOS = L_INFOS - 1
                log_window = LogWindow(screen, log_to_print, start_line)
                log_window.show()
                print_out_interactive_footer(screen)

    except KeyboardInterrupt:
        pass

    finally:
        # shut down cleanly
        curses.nocbreak()
        screen.keypad(0)
        curses.echo()
        curses.endwin()


def print_out_interactive_header(
    admin, service, involved_ips, screen, start_line=0
):
    """Draw the three header rows on the curses screen.

    Row 1 carries the service/column names, row 2 the IP addresses
    (masked unless admin mode or a websocket service), row 3 the opening
    separator line of the flow.

    Args:
        admin: when True, show real IP addresses; otherwise mask them
            for GDPR compliance.
        service: service name from the command line; 'ws' is never
            masked.
        involved_ips: ordered list of "ip" or "ip:port" column keys.
        screen: curses window to draw on.
        start_line: first screen row to use.
    """

    def _rtp_column_name(port):
        """Map a numeric port to its RTP column label, or None."""
        port = int(port)
        for prefix, label in (
            ("b2b", "b2b_rtp"),
            ("asterisk", "asterisk_rtp"),
            ("rtp", "rtpengine"),
        ):
            if (
                int(ports_ranges[prefix + "_port_min"])
                <= port
                <= int(ports_ranges[prefix + "_port_max"])
            ):
                return label
        return None

    # row 1: service names resolved from the configured system IPs and,
    # for ip:port columns, from the media ports ranges
    service_header = EMPTY_DATE + EMPTY_CODE
    for address in involved_ips:
        if address in system_ip:
            service_header += system_ip[address].center(SPAN_FLOW, " ")
            continue
        # rpartition tolerates extra colons (the old split(":") raised
        # ValueError on addresses with more than one colon)
        host, sep, port = address.rpartition(":")
        if sep and host in system_ip:
            if port == "":
                service_header += system_ip[host].center(SPAN_FLOW, " ")
            else:
                label = _rtp_column_name(port)
                service_header += (
                    label.center(SPAN_FLOW, " ") if label else EMPTY_FLOW
                )
        else:
            service_header += EMPTY_FLOW
    screen.addstr(
        start_line, 0, service_header, curses.A_BOLD | curses.color_pair(2)
    )

    # row 2: the IPs themselves (obscured unless allowed)
    ip_header = EMPTY_DATE + EMPTY_CODE
    for address in involved_ips:
        if admin or service == "ws":
            ip_header += address.center(SPAN_FLOW, " ")
        else:
            ip_header += "XXX.XXX.XXX.XXX".center(SPAN_FLOW, " ")
    screen.addstr(
        start_line + 1, 0, ip_header, curses.A_BOLD | curses.color_pair(2)
    )

    # row 3: opening separator. No extra padding so the columns line up
    # with the two header rows above (a stray " " previously shifted
    # this row one character to the right).
    flow_line = EMPTY_DATE + EMPTY_CODE + TAG_FLOW * len(involved_ips)
    screen.addstr(start_line + 2, 0, flow_line)


def print_out_interactive_footer(screen):
    """Render the key-binding hint on the last screen row."""
    hint = "'q' Exit | '9' Increase Raw | '0' Decrease Raw"
    last_row = curses.LINES - 1
    screen.addstr(last_row, 0, hint, curses.A_BOLD)


def print_out_normal(admin, _service, output_lines, involved_ips):
    """Print the complete call flow to stdout (non-interactive mode).

    Args:
        admin: when True, show real IP addresses; otherwise mask them
            for GDPR compliance.
        _service: service name from the command line; 'ws' is never
            masked. Used as fallback when a line has no service of its
            own.
        output_lines: parsed flow entries produced by process_data.
        involved_ips: ordered list of "ip" or "ip:port" column keys.
    """

    def _rtp_column_name(port):
        """Map a numeric port to its RTP column label, or None."""
        port = int(port)
        for prefix, label in (
            ("b2b", "b2b_rtp"),
            ("asterisk", "asterisk_rtp"),
            ("rtp", "rtpengine"),
        ):
            if (
                int(ports_ranges[prefix + "_port_min"])
                <= port
                <= int(ports_ranges[prefix + "_port_max"])
            ):
                return label
        return None

    # header with the service/column names
    service_header = BColors.HEADER + EMPTY_DATE + EMPTY_CODE
    for address in involved_ips:
        if address in system_ip:
            service_header += system_ip[address].center(SPAN_FLOW, " ")
            continue
        # rpartition tolerates extra colons (the old split(":") raised
        # ValueError on addresses with more than one colon)
        host, sep, port = address.rpartition(":")
        if sep and host in system_ip:
            if port == "":
                service_header += system_ip[host].center(SPAN_FLOW, " ")
            else:
                label = _rtp_column_name(port)
                service_header += (
                    label.center(SPAN_FLOW, " ") if label else EMPTY_FLOW
                )
        else:
            service_header += EMPTY_FLOW
    service_header += BColors.ENDC
    print(service_header)

    # header with the IPs (obscured unless allowed)
    ip_header = BColors.HEADER + EMPTY_DATE + EMPTY_CODE
    for address in involved_ips:
        if admin or _service == "ws":
            ip_header += address.center(SPAN_FLOW, " ")
        else:
            ip_header += "XXX.XXX.XXX.XXX".center(SPAN_FLOW, " ")
    ip_header += BColors.ENDC
    print(ip_header)

    # opening separator row of the flow
    border = EMPTY_DATE + EMPTY_CODE + TAG_FLOW * len(involved_ips)
    print(border)

    # the call flow itself, one row per SIP message
    for line in output_lines:
        service = line["_service"] or _service
        flow_line = line["datetime"].ljust(SPAN_DATE, " ")
        flow_line += "".ljust(SPAN_CODE, " ")
        # build the flow columns, then center the method/response label
        full_flow_line = "".join(line["flow"])
        retobj = insert_flowline_info(service, full_flow_line, line, True)
        flow_line += retobj["new_flow_line"]
        print(flow_line)

    # closing separator row
    print(border)


def insert_flowline_info(service, flow_line, line, colorize):
    """
    Replace the plain arrow of a flow line with a centered description.

    This function parses a text flow line entry in this format:
        ' Jan  8 08:02:17                    \
            |<------------------|                    |          '
    parses it and returns something like
    ' Jan  8 08:02:17                    \
        |<     100 Trying    |                    |          '

    In other words, it replaces the run of dashes between the two
    endpoints with a centered description containing:
    - the SIP method (if the text flow line entry refers to a SIP
      request)
    - the SIP response code (if the text flow line entry refers to a SIP
      Response). In the latter case a short description of the response
      is appended (from SIP_RESP_CODES).

    Input :
        - service : service name; 'ws' entries are labelled with the
          request method plus the code instead of the code description
        - flow_line : the text flow line to analyze
        - line : parsed log entry dict; 'code' holds the SIP method or
          response code, 'callId' the Call-ID, 'method' the request
          method (used for ws responses)
        - colorize (boolean) : if true return new string with ansi colors
          inside

    Output :
        - dictionary with new line infos:
          new_flow_line   the rewritten flow line
          start_replace   index where the placeholder begins (-1 if no
                          replacement happened)
          end_replace     index just past the replaced placeholder
          color_pair      curses color pair to use in interactive mode
          bgColor         ANSI background color assigned to this Call-ID
          fgColor         ANSI foreground color code
    """
    reto_bj = {
        "new_flow_line": "",
        "start_replace": -1,
        "end_replace": -1,
        "color_pair": 0,
        "bgColor": "",
        "fgColor": 0,
    }

    flow_information = line["code"]

    # calculate callid and pick its background color (empty string when
    # background colorizing is disabled on the command line)
    call_id = line["callId"]
    bg_color_ansi_string = CallidsBGColors.find_call_id_bg_color(call_id)
    reto_bj["bgColor"] = bg_color_ansi_string

    # room available for the label = number of dash characters between
    # the two endpoints of this flow line
    space_available = flow_line.count(HORIZONTAL_LINE)
    if space_available == 0:
        # nothing to replace: return the line untouched
        reto_bj["new_flow_line"] = flow_line
        return reto_bj

    # check if It is a sip response: a leading 3-digit number marks a
    # response, anything else is treated as a request method
    if flow_information[:3].isnumeric():
        # color by response class: 2xx success, 1xx provisional (ws code
        # "0" counts as provisional too), everything else an error
        if flow_information[0] == "2":
            color = BColors.RESPCOLOR_OK
            reto_bj["color_pair"] = 3
            reto_bj["fgColor"] = CallidsBGColors.fgColorsAnsiCodes[3]
        elif flow_information[0] == "1" or (
            service == "ws" and flow_information == "0"
        ):
            color = BColors.RESPCOLOR_PROV
            reto_bj["color_pair"] = 4
            reto_bj["fgColor"] = CallidsBGColors.fgColorsAnsiCodes[2]
        else:
            color = BColors.RESPCOLOR_ERR
            reto_bj["color_pair"] = 5
            reto_bj["fgColor"] = CallidsBGColors.fgColorsAnsiCodes[1]
        try:
            if service == "ws":
                flow_line_txt = line["method"] + " " + flow_information[:3]
            else:
                flow_line_txt = (
                    flow_information[:3]
                    + " "
                    + SIP_RESP_CODES[flow_information[:3]]
                )
            flow_line_txt = flow_line_txt.center(
                space_available, HORIZONTAL_LINE
            )
        except KeyError:
            # unknown response code (or missing 'method' for ws):
            # fall back to the raw code
            flow_line_txt = flow_information.center(
                space_available, HORIZONTAL_LINE
            )
    else:
        # SIP request: center the method name
        flow_line_txt = flow_information.center(
            space_available, HORIZONTAL_LINE
        )
        color = BColors.REQCOLOR
        reto_bj["fgColor"] = CallidsBGColors.fgColorsAnsiCodes[0]

    flow_line_placeholder = HORIZONTAL_LINE * space_available

    # label longer than the gap: truncate to keep columns aligned
    if len(flow_line_txt) > space_available:
        flow_line_txt = flow_line_txt[:space_available]

    # keep the arrow head attached to the label on the proper side so
    # the direction of the message is preserved
    if HORIZONTAL_LINE * 3 + RIGHT_ARROW + VERTICAL_LINE in flow_line:
        flow_line_placeholder = flow_line_placeholder + RIGHT_ARROW
        flow_line_txt = flow_line_txt + RIGHT_ARROW
    elif VERTICAL_LINE + LEFT_ARROW + HORIZONTAL_LINE * 3 in flow_line:
        flow_line_placeholder = LEFT_ARROW + flow_line_placeholder
        flow_line_txt = LEFT_ARROW + flow_line_txt

    if colorize:

        #  change background color if here is a bgcoloransistring
        #  and if requested from cmd line: splice the per-Call-ID
        #  background code into the middle of the ANSI escape sequence
        if bg_color_ansi_string:
            regexp = r"\[[\d]*(;[\d]*;)"
            result = re.search(regexp, color)
            if result:
                color = color.replace(
                    result.group(1), ";" + bg_color_ansi_string + ";"
                )

        flow_line_txt = color + flow_line_txt + BColors.ENDC_ALL

    # swap the dash placeholder for the label and record where it was
    new_flow_line = flow_line.replace(flow_line_placeholder, flow_line_txt)
    reto_bj["start_replace"] = flow_line.find(flow_line_placeholder)
    reto_bj["end_replace"] = reto_bj["start_replace"] + len(
        flow_line_placeholder
    )

    reto_bj["new_flow_line"] = new_flow_line
    return reto_bj


def process_file(
    pool, jobs, verbose, file_path, compressed, call_id, service_file=""
):
    """Split one log file into line chunks and parse them on the pool.

    Queues one parser_process_lines job per NB_LINES-sized slice of the
    file, waits for all jobs and collects the lines matching call_id.
    Also merges the Call-IDs seen by the workers into
    CallidsBGColors.callIdSet.

    Args:
        pool: multiprocessing.Pool running the parser workers.
        jobs: list collecting the AsyncResult handles (mutated in place).
        verbose: print progress information when True.
        file_path: absolute path of the log file to scan.
        compressed: True when file_path is a temporary decompressed copy
            that must be removed if the run is interrupted.
        call_id: the Call-ID string to search for.
        service_file: optional custom file path (-f); its basename is
            used to look up the file's node locations instead of
            file_path's.

    Returns:
        list of matching log lines gathered from all chunks.

    Exits:
        with status 4 on KeyboardInterrupt, after terminating the pool
        and removing the temporary file when needed.
    """
    call_id_findings = []
    try:
        print(" >>> {}".format(file_path))
        if not service_file:
            print(" >>> {}".format(file_locations))
        # the node-lookup key is the basename of the custom file when
        # one was given, otherwise the basename of the processed file
        if service_file:
            service_file = service_file.split("/")[-1]
        else:
            service_file = file_path.split("/")[-1]
        file_nb_lines = int(
            ssh_get_nb_lines(file_locations[service_file], file_path) or 0
        )
        # one job per NB_LINES-sized slice: chunk = [first_line, total]
        for chunk in [
            [x, file_nb_lines] for x in range(file_nb_lines)[::NB_LINES]
        ]:
            jobs.append(
                pool.apply_async(
                    parser_process_lines,
                    (verbose, file_path, chunk, call_id, service_file),
                )
            )
        print("")

        if verbose:
            print("ngcp-log-flow > Waiting for results...")

        # Wait for all jobs to finish and merge their findings
        for job in jobs:
            returned = job.get(timeout=PS_TIMEOUT)
            if returned["temp_list"]:
                call_id_findings += returned["temp_list"]

                # Merge sets containing callids
                CallidsBGColors.callIdSet = CallidsBGColors.callIdSet.union(
                    returned["callid_set"]
                )
        return call_id_findings
    except KeyboardInterrupt:
        print("Caught Keyboard Interrupt, terminating processing")
        # Terminate processes instead of waiting for pending jobs
        pool.terminate()
        # Remove compressed file if necessary
        if compressed:
            if os.path.isfile(file_path):
                os.remove(file_path)
            if verbose:
                print("ngcp-log-flow > Deleted temp file '%s'" % file_path)
        sys.exit(4)


def remove_compressed_file(verbose, tasks):
    """Delete the temporary decompressed log files of all tasks.

    Args:
        verbose: when True, report each deleted file on stdout.
        tasks: dict of task descriptors; entries with a truthy
            "compress" flag own a temporary file at "filepath" that
            must be removed.
    """
    # iterate values directly instead of re-indexing through the keys
    for task in tasks.values():
        if not task["compress"]:
            continue
        if os.path.isfile(task["filepath"]):
            os.remove(task["filepath"])
        if verbose:
            print(
                "ngcp-log-flow > Deleted temporary file '{}'".format(
                    task["filepath"]
                )
            )


def join_on_date(resource, list_of_lines, service_type):
    """Merge log lines into a nested month/day/hour/minute/second dict.

    Each line is prefixed with the (abbreviated) service type and
    appended to resource[month][day][hour][minute][second]; the
    per-minute dict is re-sorted by second so later flattening keeps
    the lines in time order.

    Args:
        resource: dict updated in place (shared across service types).
        list_of_lines: raw log lines. Timestamps are either syslog
            style ('Jan 31 23:59:59.999999', 22 chars) or ISO style
            ('2021-12-31 23:59:59.9999', 24 chars).
        service_type: service label; 'websocket' is shortened to 'ws'.
    """
    service_type = service_type.replace("websocket", "ws")
    for line in list_of_lines:
        try:
            if line[0].isalpha():
                # syslog timestamp: 'Mon DD HH:MM:SS.ffffff'
                month, day, time = re.split(r"\s+", line[:22])
            else:
                # ISO timestamp: 'YYYY-MM-DD HH:MM:SS.ffff'
                date, time = line[:24].split(" ")
                _, month, day = date.split("-")
            hour, minute, second = time.split(":")
        except ValueError:
            # previously the two literals were concatenated without a
            # separating space and read "...23:59:59'or '2021-..."
            print(
                "Date format should be either 'Jan 31 23:59:59' "
                "or '2021-12-31 23:59:59.9999'"
            )
            continue
        minute_bucket = (
            resource.setdefault(month, {})
            .setdefault(day, {})
            .setdefault(hour, {})
            .setdefault(minute, {})
        )
        minute_bucket.setdefault(second, []).append(
            service_type + " " + line
        )
        # keep seconds sorted so merge_dates emits lines in time order
        resource[month][day][hour][minute] = collections.OrderedDict(
            sorted(minute_bucket.items())
        )


def merge_dates(resource):
    """Flatten the nested date dict into a deduplicated ordered list.

    Walks resource[month][day][hour][minute][second] in insertion order
    and concatenates all per-second line lists, dropping duplicates
    while preserving first-seen order.

    Args:
        resource: nested dict built by join_on_date.

    Returns:
        list of unique log lines. Empty input yields [] (the previous
        reduce-based concatenation raised TypeError on an empty
        resource and copied the growing list on every step, O(n^2)).
    """
    flattened = [
        entry
        for month in resource
        for day in resource[month]
        for hour in resource[month][day]
        for minute in resource[month][day][hour]
        for second in resource[month][day][hour][minute]
        for entry in resource[month][day][hour][minute][second]
    ]
    # dict.fromkeys keeps first-seen order while removing duplicates
    return list(dict.fromkeys(flattened))


def main():
    """The start of the script.

    Parses the command line, locates and scans the relevant log files
    (locally and on remote nodes) for the requested Call-ID, and
    renders the resulting call flow either interactively (curses) or
    on stdout.
    """
    global key
    # general params
    verbose = False
    print_log = False
    debug = False
    interactive = False
    admin = False
    service = ""
    call_id = ""
    custom_config = ""
    date = ""
    custom_file = ""
    remote = "localhost"

    usage = (
        """
ngcp-log-flow create a call flow of a single Call-ID taking NGCP logs as input.

Usage:
  ngcp-log-flow -s ["""
        + "|".join(list_services)
        + """] -c <call_id> [OPTIONS]...

Options:
  -a, --admin\t\tprint sensitive information (by default obscured due"""
        """ to GDPR compliance)
  -b, --background\tcolorize bg according to callid
  -c, --callid=\t\tcall_id of the call to display (mandatory)
  -C, --config=\t\tload custom configuration file from provided directory
  -d, --date=\t\tsearch in the log with a particular date (format YYYYMMDD)
  -D, --debug\t\tprint debugging information of the call before the flow
  -f, --file=\t\tread the log from the indicated file (absolute path) instead
             \t\tof using default one.
  -h, --help\t\tprint this help
  -i, --interactive\tenable interactive log mode
  -k, --keyfile=\tif needed provide here a key file for remote servers access
  -p, --print\t\tprint the log of the call before the flow
  -r, --remote\t\tadd remote server to get
  -s, --service=\tlog service to parse (mandatory)
  -v, --verbose\t\tprint additional information

In case of bugs send an email to <support@sipwise.com>.
"""
    )

    # Parse input parameters:
    try:
        opts, _ = getopt.getopt(
            sys.argv[1:],
            "hvs:c:C:d:f:r:k:iapbD",
            [
                "help",
                "verbose",
                "service=",
                "callid=",
                "config=",
                "date=",
                "file=",
                "remote=",
                "keyfile=",
                "interactive",
                "admin",
                "print",
                "background",
                "debug",
            ],
        )
    except getopt.GetoptError:
        print(usage)
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            print(usage)
            sys.exit()
        elif opt in ("-v", "--verbose"):
            verbose = True
            if verbose:
                print("ngcp-log-flow > verbose enabled")
        elif opt in ("-c", "--callid"):
            call_id = str(arg)
            if verbose:
                print("ngcp-log-flow > call_id is '%s'" % call_id)
        elif opt in ("-C", "--config"):
            custom_config = str(arg)
            if verbose:
                print("ngcp-log-flow > config file is '%s'" % custom_config)
        elif opt in ("-d", "--date"):
            date = str(arg)
            if verbose:
                print("ngcp-log-flow > date is '%s'" % date)
        elif opt in ("-f", "--file"):
            custom_file = str(arg)
            if verbose:
                print("ngcp-log-flow > log file is '%s'" % custom_file)
        elif opt in ("-i", "--interactive"):
            interactive = True
            if verbose:
                print("ngcp-log-flow > interactive mode enabled")
        elif opt in ("-p", "--print"):
            print_log = True
            if verbose:
                print("ngcp-log-flow > print log enabled")
        elif opt in ("-D", "--debug"):
            debug = True
            # NOTE(review): message is only shown when -p was also
            # given — confirm the extra print_log gate is intended
            if verbose and print_log:
                print("ngcp-log-flow > debug log enabled")
        elif opt in ("-a", "--admin"):
            # sensitive data is masked by default (GDPR); require an
            # explicit confirmation before showing it
            print("You have explicitly asked to show sensitive data")
            while True:
                ans = input("Are you sure? [y, n] ")
                if not ans:
                    continue
                ans = ans.lower()
                if ans in ("y", "yes"):
                    admin = True
                    print("ngcp-log-flow > admin mode enabled")
                    break
                if ans in ("n", "no"):
                    admin = False
                    print("ngcp-log-flow > admin mode not enabled")
                    break
                print("Please enter y or n")
                continue
        elif opt in ("-s", "--service"):
            service = str(arg)
            if verbose:
                print("ngcp-log-flow > service is '%s'" % service)
        elif opt in ("-r", "--remote"):
            remote = str(arg)
            if verbose:
                print("ngcp-log-flow > add remote location '%s'" % remote)
        elif opt in ("-k", "--keyfile"):
            keyfile = str(arg)
            key = paramiko.RSAKey.from_private_key_file(keyfile)
            if verbose:
                print("ngcp-log-flow > key file is '%s'" % keyfile)
        # FIX: was matched as "--backgroud", so the registered long
        # option "--background" never enabled the flag
        elif opt in ("-b", "--background"):
            CallidsBGColors.background_colorize = True

    if call_id == "" or service == "":
        print("Call-ID and Service name are mandatory parameters\n")
        sys.exit(2)

    if service not in list_services:
        print(
            "Wrong service name. Select between "
            + " or ".join(
                [", ".join(list_services[:-1]), list_services[-1]]
                if len(list_services) > 2
                else list_services
            )
            + "\n"
        )
        sys.exit(2)

    if date:
        try:
            datetime.datetime.strptime(date, "%Y%m%d")
        except ValueError:
            print("Incorrect data format, it should be YYYYMMDD\n")
            sys.exit(2)

    # Read config file to load the used ips
    load_ips_ports(verbose, custom_config)

    tasks = {}

    service_name = {}

    # register every neighbour node as a possible location for each
    # service log file (and for the custom file when -f was given)
    neighbours = get_remote_nodes()
    if len(custom_file) > 0:
        file_locations[custom_file.split("/")[-1]] = ["localhost"]
    for node in neighbours:
        list_types = get_type_node([node.split(":")[0]]) or []
        for lt in list_types:
            file_name = [x for x in file_locations if lt in x]
            if len(file_name) > 0:
                if node not in file_locations[file_name[0]]:
                    file_locations[file_name[0]].append(node)
            if len(custom_file) > 0:
                if node not in file_locations[custom_file.split("/")[-1]]:
                    # FIX: append the node whose membership was checked;
                    # previously `remote` was appended here, which
                    # duplicated the -r remote once per missing node
                    file_locations[custom_file.split("/")[-1]].append(node)

    # split the (possibly concatenated) service argument into its known
    # sole services and prepare one parsing task per service log file
    loc = 0
    fullname_service = service.replace("ws", "websocket")
    fullnames_sole_services = [
        x.replace("ws", "websocket") for x in sole_services
    ]
    while fullname_service != "":
        f_loc = loc
        s_loc = loc + 1
        loc = s_loc
        service_name[f_loc] = [
            x for x in fullnames_sole_services if fullname_service.find(x) == 0
        ][0]
        service_name[s_loc] = fullname_service.rsplit(
            service_name[f_loc], maxsplit=1
        )[-1]
        file_name = [x for x in file_locations if service_name[f_loc] in x][0]
        if remote not in file_locations[file_name]:
            file_locations[file_name].append(remote)
        fullname_service = service_name[s_loc]
        tasks[f_loc + 1] = {
            "filepath": "",
            "compress": "",
            "job": [],
            "callid_findings": [],
        }
        if (
            custom_file != ""
            and remote not in file_locations[custom_file.split("/")[-1]]
        ):
            file_locations[custom_file.split("/")[-1]].append(remote)
        if custom_file:
            print("Searching file [{}]".format(custom_file))
        else:
            print(
                "Searching file [{}] in the following nodes {}".format(
                    file_name, file_locations[file_name]
                )
            )
        tasks[f_loc + 1]["filepath"], tasks[f_loc + 1]["compress"] = (
            prepare_file(
                verbose, service_name[f_loc], date, custom_file, call_id
            )
        )
        if service_name[s_loc]:
            tasks[f_loc + 2] = {
                "filepath": "",
                "compress": "",
                "job": [],
                "callid_findings": [],
            }
            tasks[f_loc + 2]["filepath"], tasks[f_loc + 2]["compress"] = (
                prepare_file(
                    verbose, service_name[s_loc], date, custom_file, call_id
                )
            )

    # Make the process ignore SIGINT before a process Pool is created.
    # This way created child processes inherit SIGINT handler
    original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)

    # Init mp objects
    pool = mp.Pool(PROC_NUM)

    # Restore SIGINT handler in parent process after a Pool has been created
    signal.signal(signal.SIGINT, original_sigint_handler)

    for _, task in tasks.items():
        task["callid_findings"] = process_file(
            pool,
            task["job"],
            verbose,
            task["filepath"],
            task["compress"],
            call_id,
            service_file=custom_file,
        )

    # Clean up processes
    pool.close()

    remove_compressed_file(verbose, tasks)

    # merge the findings of all services on their timestamps so the
    # flow is rendered in chronological order
    merge_logs_d = {}
    for idx, _ in service_name.items():
        if idx + 1 in tasks:
            join_on_date(
                merge_logs_d,
                tasks[idx + 1]["callid_findings"],
                service_name[idx],
            )
    if len(merge_logs_d):
        tasks[1]["callid_findings"] = merge_dates(merge_logs_d)

    if verbose:
        print(
            "ngcp-log-flow > Number of lines found for string '%s': %s"
            % (call_id, len(tasks[1]["callid_findings"]))
        )

    if len(tasks[1]["callid_findings"]) == 0:
        print("No results found for string '%s'" % call_id)
        sys.exit(1)

    log_parser_result, involved_ips = log_parser(
        admin, service, tasks[1]["callid_findings"], print_log, debug, call_id
    )

    output_lines = process_data(
        service, verbose, log_parser_result, involved_ips
    )

    if interactive:
        interactive_screen(
            admin, service, output_lines, involved_ips, log_parser_result
        )
    else:
        print_out_normal(admin, service, output_lines, involved_ips)

    return 0


if __name__ == "__main__":
    # Log files are readable only by root, so refuse to run otherwise.
    if getpass.getuser() != "root":
        print("You need to be 'root' to access the log files.")
        sys.exit(2)
    main()
