diff --git a/.github/workflows/end-to-end-test.yml b/.github/workflows/end-to-end-test.yml
index 8040204..28bb018 100644
--- a/.github/workflows/end-to-end-test.yml
+++ b/.github/workflows/end-to-end-test.yml
@@ -77,6 +77,8 @@ jobs:
           docker compose -f ./docker-compose-end-to-end-test.yml logs test-harness | grep -Po "Test Harness test run completed successfully"

+          rm ./munin/deploy/config/job_definitions/*.json
+
       - name: Check for performance test failures
         run: |
           # Inspect report output for failures of performance test
@@ -93,6 +95,47 @@ jobs:
             echo "No test failures."
             exit 0
           fi
+
+      - name: Run performance test job def json
+        run: |
+          echo "Running a performance test"
+          timeout 1m ./scripts/end-to-end-curl-commands-performance-test-jobdefjson.sh
+
+          # this should return true as tests are running
+          echo "Testing that tests are running"
+          curl 127.0.0.1:8800/isTestRunning | grep 'true'
+
+          # It takes this long to get any meaningful output from the test harness
+          # It's 60 seconds for the test harness to start the job ...
+          # ... and 10 seconds to run the job ...
+          # ... and 60 seconds to ensure the test is finished ...
+          # ... and another 50 seconds for grace time in starting up/waiting for logs/waiting for calculations
+          echo "Sleeping to give the test harness time to work"
+          date
+          sleep 120
+          date
+          curl 127.0.0.1:8800/isTestRunning | grep 'false'
+
+          docker compose -f ./docker-compose-end-to-end-test.yml logs test-harness | grep -Po "Test Harness test run completed successfully"
+
+          rm ./munin/deploy/config/job_definitions/*.json
+
+      - name: Check for performance test failures job def json
+        run: |
+          # Inspect report output for failures of performance test
+          failures=$(xmllint --xpath 'string(//testsuites/@failures)' ./report_output/performance_test_jobdef/Report.xml)
+
+          # Print the result
+          echo "Performance test failures: $failures"
+
+          # Check if failures is not zero
+          if [ "$failures" -ne 0 ]; then
+            echo "There are performance test failures!"
+            exit 1
+          else
+            echo "No test failures."
+            exit 0
+          fi

       - name: Run functional test
         run: |
@@ -115,6 +158,8 @@ jobs:
           curl 127.0.0.1:8800/isTestRunning | grep 'false'

           docker compose -f ./docker-compose-end-to-end-test.yml logs test-harness | grep -Po "Test Harness test run completed successfully"
+
+          rm ./munin/deploy/config/job_definitions/*.json

       - name: Check for functional test failures
         run: |
@@ -139,7 +184,7 @@ jobs:
             echo "No test failures."
             exit 0
           fi
-      
+
       - name: Run tear down procedure
         run: |
           docker compose -f ./munin/deploy/docker-compose.prod.yml down
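Both "Check for ... failures" steps read the failures attribute from the JUnit-style Report.xml that the harness writes, via xmllint --xpath 'string(//testsuites/@failures)'. A minimal sketch of the same check in plain Python (standard library only); the report path is copied from the new step above, and treating the document root as the testsuites element is an assumption that mirrors the XPath rather than a documented report schema:

# Sketch only: mirrors the xmllint failure check in the workflow step above.
import sys
import xml.etree.ElementTree as ET

def report_failures(report_path: str) -> int:
    """Return the failure count recorded in a JUnit-style report."""
    root = ET.parse(report_path).getroot()
    # string(//testsuites/@failures): use the root if it is already
    # <testsuites>, otherwise look for a nested <testsuites> element.
    suites = root if root.tag == "testsuites" else root.find(".//testsuites")
    return int(suites.get("failures", "0")) if suites is not None else 0

if __name__ == "__main__":
    failures = report_failures(
        "./report_output/performance_test_jobdef/Report.xml"
    )
    print(f"Performance test failures: {failures}")
    sys.exit(1 if failures != 0 else 0)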
diff --git a/scripts/end-to-end-curl-commands-performance-test-jobdefjson.sh b/scripts/end-to-end-curl-commands-performance-test-jobdefjson.sh
new file mode 100755
index 0000000..edc5ffb
--- /dev/null
+++ b/scripts/end-to-end-curl-commands-performance-test-jobdefjson.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+set -e
+
+# We do a while loop here to keep requesting the named-zip-files endpoint until the test harness is up and running
+echo "Checking if test harness is up and running by uploading test files"
+while ! curl --location --request POST 'http://127.0.0.1:8800/upload/named-zip-files' --form 'performance_test_jobdef=@"tests/test_harness/test_files/test_jobdefjson.zip"' -s -o /dev/null -w "%{http_code}" | grep -q 200; do
+    echo tried to do a curl request to named-zip-files endpoint but it failed, trying again in 1 second
+    echo printing test harness logs to see what is going on
+    docker compose -f ./docker-compose-end-to-end-test.yml logs test-harness | tail
+    sleep 1
+done
+echo "Uploading test config"
+curl -X POST -d '{"TestName": "performance_test_jobdef", "TestConfig":{"type":"Performance", "performance_options": {"num_files_per_sec":10}}}' -H 'Content-Type: application/json' 'http://127.0.0.1:8800/startTest'
+echo "Waiting for performance test to start"
+sleep 1
\ No newline at end of file
diff --git a/test_harness/config/config.py b/test_harness/config/config.py
index 19762b9..e130e28 100644
--- a/test_harness/config/config.py
+++ b/test_harness/config/config.py
@@ -3,7 +3,7 @@
 """

 import os
-from typing import Optional
+from typing import Optional, Literal
 from configparser import ConfigParser
 from pathlib import Path

@@ -220,6 +220,7 @@ def parse_from_dict(self, test_config: dict[str, str | dict]) -> None:
                 "finish_interval": `int` => 0, defaults to 30;
                 "timeout": `int` => 0, defaults to 120;
             },
+            "jobdef_type": `Literal`[`"uml"`, `"json"`], defaults to `"uml"`;
         }
         :type test_config: `dict`[`str`, `str` | `dict`]
         """
@@ -255,6 +256,7 @@ def set_default_config(self) -> None:
         self.sample_rate = 0
         self.low_memory = False
         self.test_finish = {}
+        self.jobdef_type: Literal["uml", "json"] = "uml"

     def config_to_dict(self) -> dict:
         """Provide config as a dictionary"""
@@ -267,6 +269,7 @@ def config_to_dict(self) -> dict:
             "sample_rate": self.sample_rate,
             "low_memory": self.low_memory,
             "test_finish": self.test_finish,
+            "jobdef_type": self.jobdef_type,
         }
         if self.type != "Functional":
             config_dict_to_return["performance_options"] = (
diff --git a/test_harness/config/default_test_config.yaml b/test_harness/config/default_test_config.yaml
index e722742..5fb2caf 100644
--- a/test_harness/config/default_test_config.yaml
+++ b/test_harness/config/default_test_config.yaml
@@ -33,3 +33,5 @@ test_finish:
   metric_get_interval: 5
   finish_interval: 30
   timeout: 120
+
+jobdef_type: "uml"
\ No newline at end of file
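Together, the config.py and default_test_config.yaml changes expose the new jobdef_type switch to test configurations. A short sketch of selecting it from a config dict, mirroring the parse_from_dict docstring above and the parametrised test later in this diff; the event_gen_options entry is simply the value that test uses, not a required key:

from test_harness.config.config import TestConfig

test_config = TestConfig()
# "jobdef_type" defaults to "uml" when omitted (see default_test_config.yaml)
test_config.parse_from_dict({
    "event_gen_options": {"invalid": False},
    "jobdef_type": "json",
})
assert test_config.jobdef_type == "json"
assert test_config.config_to_dict()["jobdef_type"] == "json"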
diff --git a/test_harness/protocol_verifier/__init__.py b/test_harness/protocol_verifier/__init__.py
index ed44923..f34c669 100644
--- a/test_harness/protocol_verifier/__init__.py
+++ b/test_harness/protocol_verifier/__init__.py
@@ -16,7 +16,7 @@
     generate_test_events_from_puml_files,
     get_test_events_from_test_file_jsons,
 )
-from test_harness.protocol_verifier.send_job_defs import send_job_defs_from_uml
+from test_harness.protocol_verifier.send_job_defs import handle_send_job_defs
 from test_harness.protocol_verifier.testing_suite.base_test_classes import (
     FunctionalTest,
     PerformanceTest,
@@ -126,11 +126,12 @@ def puml_files_test(
     )

     # send job definitions to pv
-    send_job_defs_from_uml(
-        url=harness_config.pv_send_job_defs_url,
-        uml_file_paths=puml_file_paths,
+    handle_send_job_defs(
+        file_paths=puml_file_paths,
         harness_config=harness_config,
+        file_type=test_config.jobdef_type,
     )
+
     logging.getLogger().info(
         "Waiting %ds for job defs to load",
         harness_config.pv_config_update_time,
diff --git a/test_harness/protocol_verifier/send_job_defs.py b/test_harness/protocol_verifier/send_job_defs.py
index 965fa94..0331553 100644
--- a/test_harness/protocol_verifier/send_job_defs.py
+++ b/test_harness/protocol_verifier/send_job_defs.py
@@ -1,6 +1,7 @@
 """Methods to send job defs to PV using uml file paths"""

 from io import BytesIO
+from typing import Literal

 from test_harness.protocol_verifier.config.config import ProtocolVerifierConfig
 from test_harness.utils import create_file_io_file_name_tuple_with_file_path
@@ -10,25 +11,56 @@
 from test_harness.requests_th.send_config import post_config_form_upload


-def send_job_defs_from_uml(
-    url: str, uml_file_paths: list[str], harness_config: ProtocolVerifierConfig
+def handle_send_job_defs(
+    file_paths: list[str],
+    harness_config: ProtocolVerifierConfig,
+    file_type: Literal["json", "uml"],
 ) -> None:
-    """Method to send job defs from a list of uml file paths to an url with
+    """Method to handle sending job defs to a URL with
     :class:`ProtocolVerifierConfig`

-    :param url: The url to send the request for uploading job definitions
-    :type url: `str`
-    :param uml_file_paths: A list of filepaths to uml file job definitions
-    :type uml_file_paths: `list`[`str`]
+    :param file_paths: A list of file paths to job definitions
+    :type file_paths: `list`[`str`]
+    :param harness_config: Config for the test harness
+    :type harness_config: :class:`ProtocolVerifierConfig`
+    :param file_type: The type of file to send job defs from
+    :type file_type: `Literal`["json", "uml"]
+    """
+    match file_type:
+        case "json":
+            send_job_defs_from_json(
+                harness_config.pv_send_job_defs_url, file_paths, harness_config
+            )
+        case "uml":
+            send_job_defs_from_uml(
+                harness_config.pv_send_job_defs_url, file_paths, harness_config
+            )
+        case _:
+            raise ValueError(f"Invalid file type: {file_type}")
+
+
+def send_job_defs(
+    url: str, job_defs: list[str],
+    file_paths: list[str],
+    harness_config: ProtocolVerifierConfig
+) -> None:
+    """Method to send job defs from a list of job defs and file paths to a URL
+    with :class:`ProtocolVerifierConfig`
+
+    :param url: The url to send the request for uploading job definitions
+    :type url: `str`
+    :param job_defs: A list of job definitions as strings
+    :type job_defs: `list`[`str`]
+    :param file_paths: A list of file paths to job definitions
+    :type file_paths: `list`[`str`]
     :param harness_config: Config for the test harness
     :type harness_config: :class:`ProtocolVerifierConfig`
     """
-    job_defs = get_job_defs_from_uml_files(uml_file_paths)
     file_io_file_name_tuples = [
         create_file_io_file_name_tuple_with_file_path(
-            file_path.replace(".puml", ".json"), file_string
+            file_path, job_def
         )
-        for file_path, file_string in zip(uml_file_paths, job_defs)
+        for job_def, file_path in zip(job_defs, file_paths)
     ]
     send_job_defs_from_file_io_file_name_tuples(
         file_io_file_name_tuples=file_io_file_name_tuples,
@@ -38,6 +70,59 @@ def send_job_defs_from_uml(
     )


+def get_job_defs_from_jsons(json_file_paths: list[str]) -> list[str]:
+    """Method to get job defs from a list of json file paths
+
+    :param json_file_paths: A list of file paths to json file job definitions
+    :type json_file_paths: `list`[`str`]
+    :return: A list of job definitions as strings
+    :rtype: `list`[`str`]
+    """
+    job_defs: list[str] = []
+    for json_file_path in json_file_paths:
+        with open(json_file_path, "r") as json_file:
+            job_defs.append(json_file.read())
+    return job_defs
+
+
+def send_job_defs_from_json(
+    url: str, json_file_paths: list[str],
+    harness_config: ProtocolVerifierConfig
+) -> None:
+    """Method to send job defs from a list of json file paths to a URL with
+    :class:`ProtocolVerifierConfig`
+
+    :param url: The url to send the request for uploading job definitions
+    :type url: `str`
+    :param json_file_paths: A list of file paths to json file job definitions
+    :type json_file_paths: `list`[`str`]
+    :param harness_config: Config for the test harness
+    :type harness_config: :class:`ProtocolVerifierConfig`
+    """
+    job_defs = get_job_defs_from_jsons(json_file_paths)
+    send_job_defs(url, job_defs, json_file_paths, harness_config)
+
+
+def send_job_defs_from_uml(
+    url: str, uml_file_paths: list[str], harness_config: ProtocolVerifierConfig
+) -> None:
+    """Method to send job defs from a list of uml file paths to a URL with
+    :class:`ProtocolVerifierConfig`
+
+    :param url: The url to send the request for uploading job definitions
+    :type url: `str`
+    :param uml_file_paths: A list of filepaths to uml file job definitions
+    :type uml_file_paths: `list`[`str`]
+    :param harness_config: Config for the test harness
+    :type harness_config: :class:`ProtocolVerifierConfig`
+    """
+    job_defs = get_job_defs_from_uml_files(uml_file_paths)
+    converted_file_paths = [
+        file_path.replace(".puml", ".json") for file_path in uml_file_paths
+    ]
+    send_job_defs(url, job_defs, converted_file_paths, harness_config)
+
+
 def send_job_defs_from_file_io_file_name_tuples(
     file_io_file_name_tuples: list[tuple[BytesIO, str]],
     url: str,
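handle_send_job_defs is now the single entry point used by puml_files_test above: "json" uploads pre-built job definition files as-is, "uml" converts PlantUML files via get_job_defs_from_uml_files first, and both paths funnel into the shared send_job_defs helper; any other file_type raises ValueError. A hedged usage sketch; the file paths and config.ini name are placeholders, and building ProtocolVerifierConfig from a ConfigParser follows the tests later in this diff rather than a documented setup recipe:

from configparser import ConfigParser

from test_harness.protocol_verifier.config.config import ProtocolVerifierConfig
from test_harness.protocol_verifier.send_job_defs import handle_send_job_defs

config_parser = ConfigParser()
config_parser.read("config.ini")  # placeholder config file
harness_config = ProtocolVerifierConfig(config_parser)

# Pre-built job definition JSON files are uploaded as-is ...
handle_send_job_defs(
    file_paths=["job_defs/test_uml_1_jobdef.json"],  # placeholder path
    harness_config=harness_config,
    file_type="json",
)
# ... while .puml paths are converted to job definition JSON first.
handle_send_job_defs(
    file_paths=["job_defs/test_uml_1.puml"],  # placeholder path
    harness_config=harness_config,
    file_type="uml",
)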
diff --git a/test_harness/protocol_verifier/tests/test_files/test_uml_1_jobdef.json b/test_harness/protocol_verifier/tests/test_files/test_uml_1_jobdef.json
new file mode 100644
index 0000000..bf4a1c0
--- /dev/null
+++ b/test_harness/protocol_verifier/tests/test_files/test_uml_1_jobdef.json
@@ -0,0 +1,25 @@
+{
+    "JobDefinitionName": "test_uml_1",
+    "Events": [
+        {
+            "EventName": "A",
+            "OccurrenceId": 0,
+            "SequenceName": "test_uml_1",
+            "Application": "default_application_name",
+            "SequenceStart": true
+        },
+        {
+            "EventName": "B",
+            "OccurrenceId": 0,
+            "SequenceName": "test_uml_1",
+            "Application": "default_application_name",
+            "SequenceEnd": true,
+            "PreviousEvents": [
+                {
+                    "PreviousEventName": "A",
+                    "PreviousOccurrenceId": 0
+                }
+            ]
+        }
+    ]
+}
diff --git a/test_harness/protocol_verifier/tests/test_protocol_verifier__init__.py b/test_harness/protocol_verifier/tests/test_protocol_verifier__init__.py
index 012e32e..8001467 100644
--- a/test_harness/protocol_verifier/tests/test_protocol_verifier__init__.py
+++ b/test_harness/protocol_verifier/tests/test_protocol_verifier__init__.py
@@ -108,12 +108,16 @@
 uuid4hex = re.compile("[0-9a-f]{12}4[0-9a-f]{3}[89ab][0-9a-f]{15}\\Z", re.I)


+@pytest.mark.parametrize("jobdef_type", ["json", "uml"])
 @responses.activate
-def test_puml_files_test() -> None:
+def test_puml_files_test(jobdef_type: Literal["json", "uml"]) -> None:
     """Tests method `puml_test_files`"""
     harness_config = ProtocolVerifierConfig(config_parser)
     test_config = TestConfig()
-    test_config.parse_from_dict({"event_gen_options": {"invalid": False}})
+    test_config.parse_from_dict({
+        "event_gen_options": {"invalid": False},
+        "jobdef_type": jobdef_type,
+    })
     with mock_pv_http_interface(harness_config):
         puml_files_test(
             puml_file_paths=[test_file_path],
diff --git a/test_harness/protocol_verifier/tests/test_send_job_defs.py b/test_harness/protocol_verifier/tests/test_send_job_defs.py
index 873c1ee..0d20409 100644
--- a/test_harness/protocol_verifier/tests/test_send_job_defs.py
+++ b/test_harness/protocol_verifier/tests/test_send_job_defs.py
@@ -4,15 +4,21 @@
 from pathlib import Path
 import os
 from configparser import ConfigParser
+from unittest.mock import patch

 import responses
+from responses.matchers import multipart_matcher
 import pytest

 from test_harness.config.config import HarnessConfig
+from test_harness.protocol_verifier.config.config import ProtocolVerifierConfig
 from test_harness.utils import create_file_io_file_name_tuple
 from test_harness.protocol_verifier.send_job_defs import (
     send_job_defs_from_file_io_file_name_tuples,
     send_job_defs_from_uml,
+    get_job_defs_from_jsons,
+    send_job_defs_from_json,
+    handle_send_job_defs,
 )

 # test file resources folder
@@ -96,3 +102,114 @@ def test_send_job_defs_from_uml() -> None:
         uml_file_paths=[test_uml_file_path_1, test_uml_file_path_2],
         harness_config=harness_config,
     )
+
+
+class TestSendJobDefsFromJson:
+    """Tests for send_job_defs_from_json"""
+    @staticmethod
+    def json_string() -> str:
+        """Returns a json string"""
+        return (
+            "{\n"
+            '    "JobDefinitionName": "test_uml_1",\n'
+            '    "Events": [\n'
+            "        {\n"
+            '            "EventName": "A",\n'
+            '            "OccurrenceId": 0,\n'
+            '            "SequenceName": "test_uml_1",\n'
+            '            "Application": "default_application_name",\n'
+            '            "SequenceStart": true\n'
+            "        },\n"
+            "        {\n"
+            '            "EventName": "B",\n'
+            '            "OccurrenceId": 0,\n'
+            '            "SequenceName": "test_uml_1",\n'
+            '            "Application": "default_application_name",\n'
+            '            "SequenceEnd": true,\n'
+            '            "PreviousEvents": [\n'
+            "                {\n"
+            '                    "PreviousEventName": "A",\n'
+            '                    "PreviousOccurrenceId": 0\n'
+            "                }\n"
+            "            ]\n"
+            "        }\n"
+            "    ]\n"
+            "}\n"
+        )
+
+    def test_get_job_defs_from_jsons(self) -> None:
+        """Tests get_job_defs_from_jsons"""
+        test_json_file_path_1 = os.path.join(
+            test_file_resources, "test_uml_1_jobdef.json"
+        )
+        test_json_file_path_2 = os.path.join(
+            test_file_resources, "test_uml_1_jobdef.json"
+        )
+        job_defs = get_job_defs_from_jsons(
+            json_file_paths=[test_json_file_path_1, test_json_file_path_2]
+        )
+        json_string = self.json_string()
+        assert all(json_string == job_def for job_def in job_defs)
+
+    @responses.activate
+    def test_send_job_defs_from_jsons(self) -> None:
+        """Tests send_job_defs_from_jsons"""
+        url = "http://mockserver.com/job-definitions"
+        responses.post(
+            url,
+            status=200,
+            match=[
+                multipart_matcher(
+                    files={
+                        "upload": (
+                            "test_uml_1_jobdef.json",
+                            self.json_string().encode("utf-8"),
+                            "application/octet-stream",
+                        )
+                    }
+                )
+            ],
+        )
+        harness_config = HarnessConfig(config_parser)
+        test_json_file_path_1 = os.path.join(
+            test_file_resources, "test_uml_1_jobdef.json"
+        )
+        send_job_defs_from_json(
+            url=url,
+            json_file_paths=[test_json_file_path_1],
+            harness_config=harness_config,
+        )
+
+
+def test_handle_send_job_defs() -> None:
+    """Tests handle_send_job_defs"""
+    harness_config = ProtocolVerifierConfig(config_parser)
+    test_json_file_path_1 = os.path.join(
+        test_file_resources, "test_uml_1_jobdef.json"
+    )
+    test_uml_file_path_1 = os.path.join(test_file_resources, "test_uml_1.puml")
+    with patch(
+        "test_harness.protocol_verifier.send_job_defs.send_job_defs_from_json"
+    ) as mock:
+        handle_send_job_defs(
+            file_paths=[test_json_file_path_1, test_uml_file_path_1],
+            harness_config=harness_config,
+            file_type="json",
+        )
+        mock.assert_called_once()
+    with patch(
+        "test_harness.protocol_verifier.send_job_defs.send_job_defs_from_uml"
+    ) as mock:
+        handle_send_job_defs(
+            file_paths=[test_json_file_path_1, test_uml_file_path_1],
+            harness_config=harness_config,
+            file_type="uml",
+        )
+        mock.assert_called_once()
+    with pytest.raises(ValueError) as e_info:
+        handle_send_job_defs(
+            file_paths=[test_json_file_path_1, test_uml_file_path_1],
+            harness_config=harness_config,
+            file_type="invalid",
+        )
+    assert e_info.value.args[0] == "Invalid file type: invalid"
diff --git a/tests/test_harness/test_files/test_jobdefjson.zip b/tests/test_harness/test_files/test_jobdefjson.zip
new file mode 100644
index 0000000..d6302a4
Binary files /dev/null and b/tests/test_harness/test_files/test_jobdefjson.zip differ
diff --git a/tests/test_harness/test_run_app.py b/tests/test_harness/test_run_app.py
index 7040832..ac7a15c 100644
--- a/tests/test_harness/test_run_app.py
+++ b/tests/test_harness/test_run_app.py
@@ -17,6 +17,7 @@
 import requests
 import aiohttp
 import pandas as pd
+import pytest

 from test_harness.run_app import run_harness_app
 from test_harness.config.config import TestConfig
@@ -48,6 +49,11 @@
     Path(__file__).parent / "test_files", "test_zip_file.zip"
 )

+# get path of 2nd test zip file including a job def json
+test_file_zip_path_2 = os.path.join(
+    Path(__file__).parent / "test_files", "test_jobdefjson.zip"
+)
+
 uuid4hex = re.compile("[0-9a-f]{12}4[0-9a-f]{3}[89ab][0-9a-f]{15}\\Z", re.I)


@@ -205,6 +211,8 @@
 def run_performance_test_requests_zip_file_upload(
     results_capture: dict,
+    zip_file_path: str = test_file_zip_path,
+    zip_file_name: str = "test_zip_file.zip",
 ) -> None:
     """Function to run performance test using requests for uploaded zip file
     functionality

@@ -215,7 +223,7 @@
     # this will post the file under the name "upload"
     response = post_config_form_upload(
         file_bytes_file_names=[
-            (open(test_file_zip_path, "rb"), "test_zip_file.zip")
+            (open(zip_file_path, "rb"), zip_file_name)
         ],
         url="http://localhost:8800/upload/named-zip-files",
     )[2]
@@ -236,8 +244,15 @@
     time.sleep(1)


+@pytest.mark.parametrize("zip_file_path, zip_file_name, jobdef_file_name", [
+    (test_file_zip_path, "test_zip_file.zip", "test_uml_1.puml"),
+    (test_file_zip_path_2, "test_jobdefjson.zip", "test_uml_1_jobdef.json"),
+
+])
 @responses.activate
-def test_run_harness_app_uploaded_zip_file() -> None:
+def test_run_harness_app_uploaded_zip_file(
+    zip_file_path: str, zip_file_name: str, jobdef_file_name: str
+) -> None:
     """Test the `run_harness_app` function.

     This function sets up a test environment for the `run_harness_app`
@@ -299,7 +314,9 @@ def reception_log_call_back(
     )
     thread_2 = Thread(
         target=run_performance_test_requests_zip_file_upload,
-        args=(response_results,),
+        args=(
+            response_results, zip_file_path, zip_file_name
+        ),
     )
     thread_1.start()
     time.sleep(5)
@@ -316,7 +333,7 @@ def reception_log_call_back(
     )
     for folder, file in zip(
         ["uml_file_store", "test_file_store", "profile_store"],
-        ["test_uml_1.puml", "test_uml_1_events.json", "test_profile.csv"],
+        [jobdef_file_name, "test_uml_1_events.json", "test_profile.csv"],
     ):
         path = os.path.join(test_output_path, folder, file)
         assert os.path.exists(path)
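test_jobdefjson.zip is binary, so its contents are not visible in this diff; the parametrised store checks in test_run_app.py above imply it bundles test_uml_1_jobdef.json, test_uml_1_events.json and test_profile.csv. A sketch of how such a fixture could be rebuilt with the standard library, with the member names and source paths treated as assumptions rather than the archive's confirmed contents:

import zipfile

# Assumed members, inferred from the parametrised store checks above.
members = [
    "test_uml_1_jobdef.json",
    "test_uml_1_events.json",
    "test_profile.csv",
]

# Source directory is a placeholder for wherever these fixture files live.
source_dir = "test_harness/protocol_verifier/tests/test_files"

with zipfile.ZipFile(
    "tests/test_harness/test_files/test_jobdefjson.zip", "w"
) as zip_file:
    for name in members:
        zip_file.write(f"{source_dir}/{name}", arcname=name)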