-
Notifications
You must be signed in to change notification settings - Fork 74
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
34 changed files
with
8,522 additions
and
15 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
# Discovery Common

Python code that is shared by the Gateway/KDNRM and Commander.

This is common code used to interact with the DAG.
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
# Version of the Discovery Common package.
__version__ = '1.0.26'
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,33 @@ | ||
# Graph ID of the relationship graph between Keeper Vault records.
RECORD_LINK_GRAPH_ID = 0

# Graph ID of the discovery rules.
DIS_RULES_GRAPH_ID = 10

# Graph ID of the discovery job history.
DIS_JOBS_GRAPH_ID = 11

# Graph ID of the discovery infrastructure.
DIS_INFRA_GRAPH_ID = 12

# Graph ID of the user-to-services graph.
USER_SERVICE_GRAPH_ID = 13

# PAM record-type identifiers.
PAM_DIRECTORY = "pamDirectory"
PAM_DATABASE = "pamDatabase"
PAM_MACHINE = "pamMachine"
PAM_USER = "pamUser"

LOCAL_USER = "local"

# The record types to process.
# "order" defines the order in which the user is presented new discovery objects.
# "sort" defines how the discovery objects of a record type are sorted and presented.
# Cloud-based users are presented first, then directories second.
# We want to prompt about users that may appear on machines before processing the machine.
VERTICES_SORT_MAP = {
    PAM_USER: {"order": 1, "sort": "sort_infra_name", "item": "DiscoveryUser", "key": "user"},
    PAM_DIRECTORY: {"order": 1, "sort": "sort_infra_name", "item": "DiscoveryDirectory", "key": "host_port"},
    PAM_MACHINE: {"order": 2, "sort": "sort_infra_host", "item": "DiscoveryMachine", "key": "host"},
    PAM_DATABASE: {"order": 3, "sort": "sort_infra_host", "item": "DiscoveryDatabase", "key": "host_port"},
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,121 @@ | ||
from __future__ import annotations | ||
from .constants import VERTICES_SORT_MAP | ||
from .types import DiscoveryObject | ||
import logging | ||
import functools | ||
import re | ||
from typing import List, Optional, Union, TYPE_CHECKING | ||
|
||
if TYPE_CHECKING:
    from keeper_dag.vertex import DAGVertex

# Alias accepting either the root logger or any named logger instance.
Logger = Union[logging.RootLogger, logging.Logger]
|
||
|
||
def sort_infra_name(vertices: List[DAGVertex]) -> List[DAGVertex]:
    """
    Return the vertices sorted by their content "name" in ascending order.

    A vertex whose content has no "name" (or a None name) sorts first,
    as an empty string, instead of raising a TypeError on a None comparison.

    :param vertices: Vertices to sort; each must expose ``content_as_dict``.
    :return: New list, sorted by name ascending; the input is not mutated.
    """
    # A plain key-based sort replaces the original hand-written
    # cmp_to_key comparator, which did the same ascending comparison.
    return sorted(vertices, key=lambda vertex: vertex.content_as_dict.get("name") or "")
|
||
|
||
def sort_infra_host(vertices: List[DAGVertex]) -> List[DAGVertex]:
    """
    Return the vertices sorted by their content "name".

    Host names appear first, in ascending lexicographic order.
    IPv4 addresses appear after all host names, in ascending numeric
    order (so "2.0.0.1" sorts before "10.0.0.1"). An optional ":port"
    suffix is tolerated on an address but ignored when comparing.

    :param vertices: Vertices to sort; each must expose ``content_as_dict``.
    :return: New list, sorted as described; the input is not mutated.
    """

    # Anchored pattern: exactly four dotted decimal octets plus an optional
    # ":port". The original unanchored check (r'^\d+\.\d+\.\d+\.\d+')
    # misclassified host names that merely *start* with a dotted quad
    # (e.g. "1.2.3.4.example"), which then crashed the numeric conversion.
    ip_pattern = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d+)?$")

    def _sort_key(vertex: DAGVertex):
        name = vertex.content_as_dict.get("name") or ""
        if ip_pattern.match(name) is not None:
            # Compare addresses numerically, octet by octet; strip the port.
            octets = tuple(int(part) for part in name.split(":")[0].split("."))
            return (1, octets, "")
        # Host names compare as plain strings and sort before all IPs
        # (flag 0 < flag 1).
        return (0, (), name)

    # Key tuples replace the original cmp_to_key comparator: the leading
    # flag separates hosts from IPs, then octets/name order within a group.
    return sorted(vertices, key=_sort_key)
|
||
|
||
def sort_infra_vertices(current_vertex: DAGVertex, logger: Optional[Logger] = None) -> dict:
    """
    Group the active child vertices of ``current_vertex`` by record type
    and sort each group.

    :param current_vertex: Vertex whose children (``has_vertices()``) are grouped.
    :param logger: Optional logger; defaults to the root logger.
    :return: Dict mapping each record type in VERTICES_SORT_MAP to a list of
        that type's active vertices, sorted per the map's "sort" strategy.
        Vertices with an unknown record type are dropped; inactive vertices
        are skipped.
    """
    if logger is None:
        logger = logging.getLogger()

    # Dispatch table: strategy name (from VERTICES_SORT_MAP) -> sort function.
    sorters = {
        "sort_infra_name": sort_infra_name,
        "sort_infra_host": sort_infra_host,
    }

    # Make a map, record type to list of vertices (of that record type).
    record_type_to_vertices_map = {record_type: [] for record_type in VERTICES_SORT_MAP}

    vertices = current_vertex.has_vertices()
    logger.debug(f" found {len(vertices)} vertices")

    # Describe each active vertex, but only when debug logging is enabled:
    # DiscoveryObject.get_discovery_object() is per-vertex work done purely
    # to produce this log output.
    if logger.isEnabledFor(logging.DEBUG):
        for vertex in vertices:
            if vertex.active:
                content = DiscoveryObject.get_discovery_object(vertex)
                logger.debug(f" * {content.description}")

    # Collate the active vertices into the record-type lookup.
    for vertex in vertices:
        if not vertex.active:
            logger.debug(" vertex is not active")
            continue
        # We can't load into a pydantic object since Pydantic has a problem
        # with the Union type. We only want the record type, so read the raw dict.
        record_type = vertex.content_as_dict.get("record_type")
        if record_type in record_type_to_vertices_map:
            record_type_to_vertices_map[record_type].append(vertex)

    # Sort the vertices for each record type using its configured strategy.
    for record_type, config in VERTICES_SORT_MAP.items():
        sorter = sorters.get(config["sort"])
        if sorter is not None:
            record_type_to_vertices_map[record_type] = sorter(record_type_to_vertices_map[record_type])

    return record_type_to_vertices_map
Oops, something went wrong.