Merge pull request #53 from xtuml/52-cleanup-and-tidy-erebus-repo
52-cleanup-and-tidy-erebus-repo
ZacDCSIT authored May 30, 2024
2 parents b0b9638 + 77699ed commit 1daf435
Showing 20 changed files with 23 additions and 934 deletions.
3 changes: 0 additions & 3 deletions test_harness/config/config.py
@@ -157,9 +157,6 @@ def calc_path(given_path: str, config_field: str) -> str:
calculated_path = str(
Path(__file__).parent.parent.parent / given_path
)
# debug print
print(f" The given path is: {given_path}")
print(f" The Path is : {Path(__file__).parent.parent.parent}")
if not os.path.exists(calculated_path):
raise RuntimeError(
f"The given path '{given_path}' does not exist for the config "
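The hunk above simply drops two ad-hoc debug print() calls from calc_path; the surrounding path resolution and existence check are unchanged. Not part of this commit, but for reference, the usual replacement when diagnostic output is still wanted is module-level logging, which can be enabled per environment instead of deleted. A minimal sketch with an illustrative function name:

import logging
from pathlib import Path

logger = logging.getLogger(__name__)


def resolve_config_path(given_path: str) -> str:
    # Illustrative stand-in for the real calc_path: resolve the given path
    # against the repository root and emit a debug-level log instead of print.
    calculated_path = str(Path(__file__).parent.parent.parent / given_path)
    logger.debug("given_path=%s resolved to %s", given_path, calculated_path)
    return calculated_path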
9 changes: 6 additions & 3 deletions test_harness/process_manager/__init__.py
@@ -15,8 +15,9 @@
from test_harness.protocol_verifier.config.config import (
ProtocolVerifierConfig,
)
-except Exception:
-    pass
+except ImportError:
+    ProtocolVerifierConfig = None
+    full_pv_test = None
from test_harness.utils import clean_directories
from test_harness import AsyncTestStopper

@@ -51,7 +52,9 @@ def harness_test_manager(
"""
# TODO: add generic test to else statement
try:
-        if isinstance(harness_config, ProtocolVerifierConfig):
+        if ProtocolVerifierConfig and isinstance(
+            harness_config, ProtocolVerifierConfig
+        ):
full_pv_test(
harness_config=harness_config,
test_config=test_config,
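The two hunks above make the protocol-verifier dependency genuinely optional: the bare except Exception: pass becomes an ImportError handler that defines the missing names as None, and the isinstance check is guarded so it is only evaluated when ProtocolVerifierConfig was actually imported. A standalone sketch of that optional-dependency pattern (the module and names below are made up for illustration, not the test harness's real API):

try:
    # "optional_plugin" is an illustrative module name; the import fails
    # when the optional extra is not installed.
    from optional_plugin import PluginConfig, run_plugin_test
except ImportError:
    PluginConfig = None
    run_plugin_test = None


def run(config) -> str:
    # Check the name is available before calling isinstance, so the module
    # still imports and runs when the optional dependency is absent.
    if PluginConfig is not None and isinstance(config, PluginConfig):
        return run_plugin_test(config)
    return "optional plugin not installed; using the generic path"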
11 changes: 6 additions & 5 deletions test_harness/protocol_verifier/tests/conftest.py
@@ -30,7 +30,7 @@


def cleanup_folders():
"""Cuntion to execute clean directories"""
"""Functtion to execute clean directories"""
file_path = os.path.join(
Path(__file__).parent.parent.parent.parent
/ "tests/test_harness",
@@ -47,11 +47,12 @@ def cleanup_folders():

@pytest.fixture(autouse=True)
def run_before_and_after_tests(tmpdir):
"""Fixture to execute asserts before and after a test is run"""
# Setup: fill with any logic you want
"""Fixture to clean output directories before and after tests"""
# Before test
cleanup_folders()
yield # this is where the testing happens
# Teardown : fill with any logic you want
# This is where the testing happens
yield
# After test
cleanup_folders()


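The conftest change above tightens the fixture: its docstring now states what it actually does (clean the output directories), and the cleanup runs both before and after every test via pytest's autouse mechanism. A generic sketch of that setup/teardown shape, with an illustrative output directory rather than the harness's real paths:

import shutil
from pathlib import Path

import pytest

# Illustrative output directory; the real fixture cleans directories under
# tests/test_harness.
OUTPUT_DIR = Path("test_output")


def cleanup_folders() -> None:
    """Remove and recreate the output directory so each test starts clean."""
    shutil.rmtree(OUTPUT_DIR, ignore_errors=True)
    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)


@pytest.fixture(autouse=True)
def run_before_and_after_tests():
    """Clean output directories before and after every test."""
    cleanup_folders()  # before the test
    yield  # the test itself runs here
    cleanup_folders()  # after the test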
Binary file not shown.
10 changes: 9 additions & 1 deletion test_harness/run_app.py
@@ -14,7 +14,14 @@
from test_harness import create_app, create_test_output_directory
from test_harness.config.config import HarnessConfig, TestConfig
from test_harness.process_manager import harness_test_manager
-from test_harness.protocol_verifier import puml_files_test, get_puml_file_paths
+try:
+    from test_harness.protocol_verifier import (
+        puml_files_test,
+        get_puml_file_paths,
+    )
+except ImportError:
+    puml_files_test = None
+    get_puml_file_paths = None
from test_harness.utils import clean_directories

logging.basicConfig(level=logging.INFO)
@@ -101,6 +108,7 @@ def main(
:raises error: Raises an error if an error is raised in sub functions but
cleans directories first before re-raising
"""
# TODO update when CLI functionality working again
harness_config = HarnessConfig(harness_config_path)
test_config = TestConfig()
if test_config_yaml_path:
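run_app.py now wraps its protocol-verifier imports in the same try/except ImportError guard, defaulting puml_files_test and get_puml_file_paths to None when the subpackage cannot be imported. The diff does not show how the None case is handled downstream; a hedged sketch of one way a caller might fail fast (an assumption, not code from this commit):

def ensure_pv_available(puml_files_test, get_puml_file_paths) -> None:
    # Both names are None when the optional protocol_verifier import failed,
    # so refuse to continue with a clear message rather than a TypeError later.
    if puml_files_test is None or get_puml_file_paths is None:
        raise RuntimeError(
            "test_harness.protocol_verifier could not be imported; "
            "PUML file tests are unavailable"
        )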
136 changes: 2 additions & 134 deletions tests/test_harness/conftest.py
@@ -9,21 +9,16 @@
import sys
from os.path import abspath
from pathlib import Path
-from typing import Generator, Literal, Callable
-import json
+from typing import Generator
import pytest
from flask.testing import FlaskClient, FlaskCliRunner
-from pygrok import Grok
-from requests import PreparedRequest


# insert root directory into path
package_path = abspath(Path(__file__).parent.parent.parent)
sys.path.insert(0, package_path)
from test_harness.__init__ import create_app, HarnessApp  # noqa

-# grok file path
-grok_file_path = Path(__file__).parent / "test_files" / "grok_file.txt"


@pytest.fixture()
def test_app() -> Generator[HarnessApp, None, None]:
@@ -69,130 +64,3 @@ def runner(test_app: HarnessApp) -> FlaskCliRunner:
:rtype: :class:`FlaskCliRunner`
"""
return test_app.test_cli_runner()


@pytest.fixture
def grok_priority_patterns() -> list[Grok]:
"""Fixture providing a list of grok patterns in priority order
:return: List of grok patterns
:rtype: `list`[:class:`Grok`]
"""
return [
Grok(
"%{TIMESTAMP_ISO8601:timestamp} %{NUMBER} %{WORD:field} :"
" JobId = %{UUID} : EventId = %{UUID:event_id} : "
"EventType = %{WORD}"
),
Grok(
"%{TIMESTAMP_ISO8601:timestamp} %{NUMBER} %{WORD:field} :"
" JobId = %{UUID:job_id}"
),
]


@pytest.fixture
def expected_verifier_grok_results() -> list[dict[str, str]]:
"""Fixture providing expected verifier groked results
:return: Returns a list of groked results
:rtype: `list`[`dict`[`str`, `str`]]
"""
return [
{
"timestamp": "2023-09-28T19:27:23.434758Z",
"field": "svdc_new_job_started",
"event_id": "3cf78438-8084-494d-8d7b-efd7ea46f7d4",
},
{
"timestamp": "2023-09-28T19:27:23.514683Z",
"field": "aeordering_job_processed",
"job_id": "4cdbe6d0-424a-4a96-9357-3b19144ee07b",
},
{
"timestamp": "2023-09-28T19:27:23.514745Z",
"field": "aeordering_events_processed",
"event_id": "7a231b76-8062-47da-a2c9-0a764dfa3dd9",
},
{
"timestamp": "2023-09-28T19:27:23.515067Z",
"field": "aeordering_events_blocked",
"event_id": "7a231b76-8062-47da-a2c9-0a764dfa3dd9",
},
{
"timestamp": "2023-09-28T19:10:57.012539Z",
"field": "svdc_job_success",
"job_id": "85619f16-f04f-4f60-8525-2f643c6b417e",
},
]
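The deleted grok_priority_patterns fixture paired the PV log formats with pygrok patterns in priority order (the more specific event-level pattern first, the job-level pattern as a fallback), and expected_verifier_grok_results lists the fields each pattern extracts. A small sketch of how such a priority list is typically applied — the first pattern that matches wins; the helper name is illustrative:

from pygrok import Grok

# Same two patterns as the deleted fixture, most specific first.
PRIORITY_PATTERNS = [
    Grok(
        "%{TIMESTAMP_ISO8601:timestamp} %{NUMBER} %{WORD:field} :"
        " JobId = %{UUID} : EventId = %{UUID:event_id} : "
        "EventType = %{WORD}"
    ),
    Grok(
        "%{TIMESTAMP_ISO8601:timestamp} %{NUMBER} %{WORD:field} :"
        " JobId = %{UUID:job_id}"
    ),
]


def grok_line(line: str) -> dict | None:
    """Return the fields from the first matching pattern, or None."""
    for pattern in PRIORITY_PATTERNS:
        result = pattern.match(line)
        if result is not None:
            return result
    return None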


@pytest.fixture
def get_log_file_names_call_back() -> Callable[
...,
tuple[Literal[400], dict, Literal["Error response"]]
| tuple[Literal[400], dict, str]
| tuple[Literal[200], dict, str],
]:
"""Fixture to provide a call back request function for a
POST request endpoint to get the file names for a domain location of the
PV with specified file prefix. The request contains a json payload
containing:
* "location" - Domain location of the log files to get
* "file_prefix" - The file prefix of the log file names to get
:return: Returns the fixture
:rtype: :class:`Callable`[
`...`,
`tuple`[:class:`Literal`[`400`], `dict`, :class:`Literal`[
`"Error response"`
]]
| `tuple`[:class:`Literal`[`400`], `dict`, `str`]
| `tuple`[:class:`Literal`[`200`], `dict`, `str`],
]
"""

def request_callback(
request: PreparedRequest,
) -> (
tuple[Literal[400], dict, Literal["Error response"]]
| tuple[Literal[400], dict, str]
| tuple[Literal[200], dict, str]
):
payload = json.loads(request.body)
headers = {}
file_names = []
if set(["location", "file_prefix"]) != set(payload.keys()):
return (400, headers, "Error response")
match payload["location"]:
case "RECEPTION":
match payload["file_prefix"]:
case "AEReception":
file_names.append("AEReception.log")
case _:
file_names.append("Reception.log")
case "VERIFIER":
match payload["file_prefix"]:
case "AEOrdering":
file_names.append("AEOrdering.log")
case "AESequenceDC":
file_names.append("AESequenceDC.log")
case "IStore":
file_names.append("IStore.log")
case _:
file_names.append("Verifier.log")
pass
case _:
return (
400,
headers,
(
"Request error: the input key"
f" {payload['location']} does not exist"
),
)
resp_body = {"fileNames": file_names}

return (200, headers, json.dumps(resp_body))

return request_callback
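The deleted get_log_file_names_call_back fixture returns a callback in the (status, headers, body) tuple form used by the responses mocking library's add_callback hook, validating the payload keys and dispatching on location and file_prefix with match statements. How it was registered is not shown in this diff; a usage sketch under the assumption that responses is the mocking layer, with an illustrative endpoint URL:

import requests
import responses


@responses.activate
def test_get_log_file_names(get_log_file_names_call_back):
    # Illustrative endpoint; the real PV log-file endpoint URL is not part
    # of this diff.
    url = "http://localhost:9000/download/log-file-names"
    responses.add_callback(
        responses.POST,
        url,
        callback=get_log_file_names_call_back,
        content_type="application/json",
    )
    resp = requests.post(
        url, json={"location": "VERIFIER", "file_prefix": "AEOrdering"}
    )
    assert resp.status_code == 200
    assert resp.json() == {"fileNames": ["AEOrdering.log"]}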
11 changes: 0 additions & 11 deletions tests/test_harness/test_files/Reception_json_validity_test.log

This file was deleted.

16 changes: 0 additions & 16 deletions tests/test_harness/test_files/Reception_test1.log

This file was deleted.
