Commit 0718959

Merge pull request #101 from intezer/feat(endpoint)/filter-by-computer-names

Add endpoint scan filtering by computer names
ofirit authored Jul 3, 2023
2 parents 64de124 + c57324f commit 0718959
Showing 3 changed files with 12 additions and 6 deletions.
11 changes: 5 additions & 6 deletions CHANGES
@@ -1,18 +1,17 @@
1.17.4
______
- Add verdict property to `UrlAnalysis` and `EndpointAnalysis`
- Add endpoint scans filter by computer names

1.17.3
______
- Raise `UrlOfflineError` when analyzing a url that seems offline.
- Don't set `composed_only` of `from_latest_hash_analysis` if not explicitly set.
- Add optional day limit to `from_latest_hash_analysis`.

1.17.2
______
- Support check availability.

1.17.1
______
6 changes: 6 additions & 0 deletions intezer_sdk/analyses_history.py
@@ -70,6 +70,7 @@ def query_endpoint_analyses_history(*,
aggregated_view: bool = None,
sources: List[str] = None,
verdicts: List[str] = None,
computer_names: List[str] = None,
limit: int = DEFAULT_LIMIT,
offset: int = DEFAULT_OFFSET
) -> AnalysesHistoryResult:
@@ -82,6 +83,7 @@ def query_endpoint_analyses_history(*,
:param aggregated_view: Whether the result should be aggregated by latest computer.
:param sources: Filter the analyses by their source.
:param verdicts: Filter by the analysis verdict.
:param computer_names: Filter by computer names.
:param limit: Number of analyses returned by the query.
:param offset: Number of analyses to skip before beginning to return the analyses.
:return: Endpoint query result from server as Results iterator.
@@ -94,6 +96,7 @@ def query_endpoint_analyses_history(*,
aggregated_view=aggregated_view,
sources=sources,
verdicts=verdicts,
computer_names=computer_names,
limit=limit,
offset=offset
)
@@ -157,6 +160,7 @@ def generate_analyses_history_filter(*,
aggregated_view: bool = None,
sources: List[str] = None,
verdicts: List[str] = None,
computer_names: List[str] = None,
limit: int = DEFAULT_LIMIT,
offset: int = DEFAULT_OFFSET
) -> Dict[str, Any]:
@@ -172,4 +176,6 @@
base_filter['sources'] = sources
if verdicts:
base_filter['verdicts'] = verdicts
if computer_names:
base_filter['computer_names'] = computer_names
return base_filter
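
To make the new branch concrete, here is a minimal usage sketch of generate_analyses_history_filter with the added argument. Only sources, verdicts, computer_names, limit and offset appear in this diff; the date arguments and their names (start_date/end_date) are assumptions for illustration.

from datetime import datetime, timedelta

from intezer_sdk.analyses_history import generate_analyses_history_filter

# start_date/end_date are assumed parameter names; verdicts and computer_names
# are the keyword arguments confirmed by the diff above.
filters = generate_analyses_history_filter(
    start_date=datetime.now() - timedelta(days=7),
    end_date=datetime.now(),
    verdicts=['malicious'],
    computer_names=['host-name'],
)

# With the change above, the returned dict now carries the computer-name filter:
#   filters['verdicts']       == ['malicious']
#   filters['computer_names'] == ['host-name']
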
1 change: 1 addition & 0 deletions tests/unit/test_results.py
@@ -166,6 +166,7 @@ def test_endpoint_analyses_history_happy_flow(self):
aggregated_view=False,
sources=["xsoar"],
verdicts=['malicious'],
computer_names=['host-name'],
)
for result in results:
assert result
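
Mirroring the test above, an end-to-end sketch of the new filter from the SDK entry point. set_global_api and the date parameter names are assumptions not shown in this diff; computer_names, verdicts and aggregated_view are the parameters added or shown in the changes above.

from datetime import datetime, timedelta

from intezer_sdk import api
from intezer_sdk.analyses_history import query_endpoint_analyses_history

api.set_global_api('<your-api-key>')  # assumed initialization helper

results = query_endpoint_analyses_history(
    start_date=datetime.now() - timedelta(days=7),  # assumed parameter name
    end_date=datetime.now(),                        # assumed parameter name
    computer_names=['host-name'],                   # new filter from this PR
    verdicts=['malicious'],
    aggregated_view=False,
)

for analysis in results:
    print(analysis)  # each item is the raw analysis record returned by the server
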
