Fix mypy type hint errors, rename loop functions
Fixed several errors reported by the static type checker mypy and
added an underscore prefix to the callback functions
_loop_step_end and _loop_end.
timopollmeier authored and greenbonebot committed Dec 5, 2024
1 parent 9d9d856 commit 5086958
Showing 14 changed files with 84 additions and 56 deletions.
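Most of the type-hint fixes in the files below repeat a few patterns: optional parameters annotated as "X | None = None" instead of "X = None", attributes declared with a bare annotation so mypy knows their type even when the value is assigned elsewhere, and Sequence instead of list for parameters that are only iterated. A minimal, self-contained sketch of these patterns, using hypothetical names not taken from the repository:

from pathlib import Path
from typing import Sequence


class ItemWriter:
    # A bare annotation declares the attribute's type for mypy; the value
    # can be assigned here in __init__ or later, e.g. by a base class.
    _items: list[str]

    def __init__(self, schema_path: Path | None = None) -> None:
        # "Path | None = None" instead of "Path = None": mypy rejects the
        # latter because None is not a valid Path.
        self._schema_path = schema_path
        self._items = []

    def add_items(self, items: Sequence[str]) -> None:
        # Sequence also accepts tuples and other read-only sequences,
        # which a list[str] parameter would not.
        self._items.extend(items)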
6 changes: 5 additions & 1 deletion greenbone/scap/cpe/cli/download.py
@@ -424,10 +424,14 @@ async def download(console: Console, error_console: Console) -> None:
)

if run_time_file:
if until:
run_time = until
else:
run_time = datetime.now()
# ensure directories exist
run_time_file.parent.mkdir(parents=True, exist_ok=True)
run_time_file.write_text(
f"{until.isoformat()}\n",
f"{run_time.isoformat()}\n",
encoding="utf8", # type: ignore
)
console.log(f"Wrote run time to {run_time_file.absolute()}.")
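The hunk above fixes a call on a possibly-None value: until may be unset, so until.isoformat() is an error mypy reports. The new code falls back to the current time first. A standalone sketch of the same fallback, factored into a hypothetical helper that is not part of the repository:

from datetime import datetime


def resolve_run_time(until: datetime | None) -> datetime:
    # Use the explicit upper bound if one was given, otherwise fall back
    # to the current time, so isoformat() is always called on a datetime.
    return until if until is not None else datetime.now()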
26 changes: 13 additions & 13 deletions greenbone/scap/cpe_match/db/manager.py
@@ -107,7 +107,7 @@ async def add_cpe_match_strings(
)
await self._insert_foreign_data(transaction, match_strings)

self._cpes = []
self._cpe_match_strings = []

async def _insert_foreign_data(
self,
@@ -150,8 +150,8 @@ async def find(
index: int | None = None,
last_modification_start_date: datetime | None = None,
last_modification_end_date: datetime | None = None,
published_start_date: datetime | None = None,
published_end_date: datetime | None = None,
created_start_date: datetime | None = None,
created_end_date: datetime | None = None,
) -> AsyncIterator[CPEMatchStringDatabaseModel]:
clauses = []

@@ -171,13 +171,13 @@ async def find(
CPEMatchStringDatabaseModel.last_modified
<= last_modification_end_date
)
if published_start_date:
if created_start_date:
clauses.append(
CPEMatchStringDatabaseModel.published >= published_start_date
CPEMatchStringDatabaseModel.created >= created_start_date
)
if published_end_date:
if created_end_date:
clauses.append(
CPEMatchStringDatabaseModel.published <= published_end_date
CPEMatchStringDatabaseModel.created <= created_end_date
)

statement = (
@@ -221,8 +221,8 @@ async def count(
match_criteria_id: str | None,
last_modification_start_date: datetime | None = None,
last_modification_end_date: datetime | None = None,
published_start_date: datetime | None = None,
published_end_date: datetime | None = None,
created_start_date: datetime | None = None,
created_end_date: datetime | None = None,
) -> int:
clauses = []

@@ -242,13 +242,13 @@ async def count(
CPEMatchStringDatabaseModel.last_modified
<= last_modification_end_date
)
if published_start_date:
if created_start_date:
clauses.append(
CPEMatchStringDatabaseModel.published >= published_start_date
CPEMatchStringDatabaseModel.created >= created_start_date
)
if published_end_date:
if created_end_date:
clauses.append(
CPEMatchStringDatabaseModel.published <= published_end_date
CPEMatchStringDatabaseModel.created <= created_end_date
)

statement = select(
7 changes: 5 additions & 2 deletions greenbone/scap/cpe_match/json.py
@@ -7,6 +7,7 @@
from dataclasses import asdict, dataclass
from datetime import datetime
from pathlib import Path
from typing import Sequence

from pontos.nvd.models.cpe_match_string import CPEMatchString
from rich.console import Console
@@ -65,7 +66,7 @@ def __init__(
storage_path: Path,
*,
compress: bool = False,
schema_path: Path = None,
schema_path: Path | None = None,
raise_error_on_validation=False,
):
super().__init__(
@@ -88,7 +89,9 @@ def add_match_string(self, match_string: CPEMatchString) -> None:
MatchStringItem(match_string=match_string)
)

def add_match_strings(self, match_strings: list[CPEMatchString]) -> None:
def add_match_strings(
self, match_strings: Sequence[CPEMatchString]
) -> None:
for match_string in match_strings:
self.add_match_string(match_string)

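Widening add_match_strings to Sequence matters because the worker hands over chunks typed as Sequence[CPEMatchString], and mypy will not accept a Sequence where a list is required, since list is the more specific, invariant type. A small illustration with hypothetical names:

from typing import Sequence


def add_all(items: Sequence[int]) -> None:
    # Were this parameter list[int], the call below with a tuple (or any
    # other non-list Sequence) would be a mypy error.
    for item in items:
        print(item)


def handle_chunk(chunk: Sequence[int]) -> None:
    add_all(chunk)


handle_chunk((1, 2, 3))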
17 changes: 12 additions & 5 deletions greenbone/scap/cpe_match/producer/nvd_api.py
@@ -9,6 +9,8 @@
from rich.console import Console
from rich.progress import Progress

from ...cli import DEFAULT_RETRIES
from ...errors import ScapError
from ...generic_cli.producer.nvd_api import NvdApiProducer
from ..cli.processor import CPE_MATCH_TYPE_PLURAL

@@ -69,12 +71,12 @@ def __init__(
error_console: Console,
progress: Progress,
*,
retry_attempts: int = DEFAULT_RETRIES,
nvd_api_key: str | None = None,
retry_attempts: int = None,
request_results: int = None,
request_results: int | None = None,
request_filter_opts: dict = {},
start_index: int = 0,
verbose: int = None,
verbose: int | None = None,
):
"""
Constructor for a CPE match string NVD API producer.
@@ -90,19 +92,21 @@ def __init__(
start_index: index/offset of the first item to request
verbose: Verbosity level of log messages.
"""
self._nvd_api: CPEMatchApi

super().__init__(
console,
error_console,
progress,
nvd_api_key=nvd_api_key,
retry_attempts=retry_attempts,
nvd_api_key=nvd_api_key,
request_results=request_results,
request_filter_opts=request_filter_opts,
start_index=start_index,
verbose=verbose,
)

def _create_nvd_api(self, nvd_api_key: str) -> CPEMatchApi:
def _create_nvd_api(self, nvd_api_key: str | None) -> CPEMatchApi:
"""
Callback used by the constructor to create the NVD API object
that can be queried for CPE match strings.
@@ -123,6 +127,9 @@ async def _create_nvd_results(self) -> NVDResults[CPEMatchString]:
Returns: The new `NVDResults` object.
"""
if self._queue is None:
raise ScapError("No queue has been assigned")

return await self._nvd_api.cpe_matches(
last_modified_start_date=self._request_filter_opts.get(
"last_modified_start_date"
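The new guard above raises as soon as no queue has been assigned. Besides failing fast at runtime, this pattern helps mypy wherever an attribute is typed as optional: after the raise, the attribute is narrowed to its non-None type for the rest of the method. A minimal sketch of that narrowing, with hypothetical names:

class Producer:
    def __init__(self) -> None:
        self._queue: list[int] | None = None

    def set_queue(self, queue: list[int]) -> None:
        self._queue = queue

    def produce(self) -> int:
        if self._queue is None:
            raise RuntimeError("no queue has been assigned")
        # From here on mypy treats self._queue as list[int], not Optional.
        return len(self._queue)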
2 changes: 2 additions & 0 deletions greenbone/scap/cpe_match/worker/db.py
@@ -92,6 +92,8 @@ def __init__(
echo_sql: Whether to print SQL statements.
verbose: Verbosity level of log messages.
"""
self._manager: CPEMatchStringDatabaseManager

super().__init__(
console,
error_console,
4 changes: 2 additions & 2 deletions greenbone/scap/cpe_match/worker/json.py
@@ -111,11 +111,11 @@ async def _handle_chunk(self, chunk: Sequence[CPEMatchString]):
"""
self._json_manager.add_match_strings(chunk)

async def loop_end(self) -> None:
async def _loop_end(self) -> None:
"""
Callback handling the exiting the main worker loop.
Makes the JSON manager write the document to the file.
"""
self._json_manager.write()
await super().loop_end()
await super()._loop_end()
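The underscore prefix marks loop callbacks such as _loop_end as internal hooks that subclasses override rather than public API. The override pattern is unchanged: do the subclass work, then delegate to the parent. A generic, runnable sketch with hypothetical classes, not the repository's:

import asyncio


class BaseWorker:
    async def run(self) -> None:
        # ... main loop would go here ...
        await self._loop_end()

    async def _loop_end(self) -> None:
        # Base hook, called once after the main loop finishes.
        print("base cleanup")


class JsonWorker(BaseWorker):
    async def _loop_end(self) -> None:
        # Subclass work first, then delegate to the parent hook.
        print("write JSON document")
        await super()._loop_end()


asyncio.run(JsonWorker().run())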
6 changes: 5 additions & 1 deletion greenbone/scap/cve/cli/download.py
@@ -432,10 +432,14 @@ async def download(console: Console, error_console: Console):
)

if run_time_file:
if until:
run_time = until
else:
run_time = datetime.now()
# ensure directories exist
run_time_file.parent.mkdir(parents=True, exist_ok=True)
run_time_file.write_text(
f"{until.isoformat()}\n",
f"{run_time.isoformat()}\n",
encoding="utf8", # type: ignore
)
console.log(f"Wrote run time to {run_time_file.absolute()}.")
2 changes: 1 addition & 1 deletion greenbone/scap/data_utils/json.py
@@ -123,7 +123,7 @@ class JsonManager:
def __init__(
self,
error_console: Console,
schema_path: Path = None,
schema_path: Path | None = None,
raise_error_on_validation=False,
):
"""
2 changes: 1 addition & 1 deletion greenbone/scap/generic_cli/processor.py
@@ -118,7 +118,7 @@ def __init__(
self._worker.set_queue(self._queue)

self._verbose: int = (
verbose if not None else self._arg_defaults["verbose"],
verbose if verbose is not None else self._arg_defaults["verbose"]
)
"Verbosity level of log messages."

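The one-line fix above repairs two bugs at once: "verbose if not None else ..." always evaluates to verbose, because "not None" is simply True, and the trailing comma wrapped the result in a one-element tuple, which mypy flagged against the int annotation. A standalone illustration:

DEFAULT_VERBOSITY = 1
verbose = None

broken = (verbose if not None else DEFAULT_VERBOSITY,)  # (None,) - a tuple
fixed = verbose if verbose is not None else DEFAULT_VERBOSITY  # 1

print(broken, fixed)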
10 changes: 5 additions & 5 deletions greenbone/scap/generic_cli/producer/base.py
@@ -4,7 +4,7 @@

from abc import ABC, abstractmethod
from argparse import ArgumentParser
from typing import AsyncContextManager, Generic, TypeVar
from typing import Any, AsyncContextManager, Generic, TypeVar

from rich.console import Console
from rich.progress import Progress
@@ -30,10 +30,10 @@ class BaseScapProducer(Generic[T], AsyncContextManager, ABC):
e.g. `BaseScapProducer[CPE]` will be a producer handling CPE objects.
"""

_item_type_plural = "SCAP items"
_item_type_plural: str = "SCAP items"
"Plural form of the type of items to use in log messages"

_arg_defaults = {
_arg_defaults: dict[str, Any] = {
"verbose": DEFAULT_VERBOSITY,
}
"Default values for optional arguments."
@@ -79,7 +79,7 @@ def __init__(
self._verbose = verbose if not None else self._arg_defaults["verbose"]
"Verbosity level of log messages."

self._queue: ScapChunkQueue[T] | None = None
self._queue: ScapChunkQueue[T]
"Queue chunks of SCAP items are added to."

@abstractmethod
@@ -110,7 +110,7 @@ async def run_loop(
It should also create a task for the `progress` object and update it
regularly.
"""
self._queue.set_producer_finished()
pass

def set_queue(self, queue: ScapChunkQueue[T]) -> None:
"""
22 changes: 12 additions & 10 deletions greenbone/scap/generic_cli/producer/nvd_api.py
@@ -5,7 +5,7 @@
from argparse import ArgumentParser, Namespace
from datetime import datetime
from pathlib import Path
from typing import Generic, TypeVar
from typing import Any, Generic, TypeVar

import httpx
import stamina
@@ -98,7 +98,9 @@ def add_args_to_parser(
)

@staticmethod
def since_from_args(args: Namespace, error_console: Console) -> datetime:
def since_from_args(
args: Namespace, error_console: Console
) -> datetime | None:
"""
Gets the lower limit for the modification time from the given
command line arguments, reading the time from a file if the
@@ -133,8 +135,8 @@ def __init__(
error_console: Console,
progress: Progress,
*,
retry_attempts: int = DEFAULT_RETRIES,
nvd_api_key: str | None = None,
retry_attempts: int,
request_results: int | None = None,
request_filter_opts: dict = {},
start_index: int = 0,
@@ -167,26 +169,26 @@ def __init__(
self._additional_retry_attempts: int = retry_attempts
"Number of retries after fetching initial data."

self._request_results: int = request_results
self._request_results: int | None = request_results
"Maximum number of results to request from the API."

self._request_filter_opts: dict = request_filter_opts
self._request_filter_opts: dict[str, Any] = request_filter_opts
"Filter options to pass to the API requests."

self._start_index: int = start_index
"Index/offset of the first item to request."

self._nvd_api_key = nvd_api_key
self._nvd_api_key: str | None = nvd_api_key
"API key to use for the requests to allow faster requests."

self._nvd_api = self._create_nvd_api(nvd_api_key)
self._nvd_api: NVDApi = self._create_nvd_api(nvd_api_key)
"The NVD API object used for querying SCAP items."

self._results = None
self._results: NVDResults[T]
"The NVD results object created by the API to get the SCAP items from."

@abstractmethod
def _create_nvd_api(self, nvd_api_key: str) -> NVDApi:
def _create_nvd_api(self, nvd_api_key: str | None) -> NVDApi:
"""
Callback used by the constructor to create the
NVD API object that can be queried for SCAP items.
@@ -199,7 +201,7 @@ def _create_nvd_api(self, nvd_api_key: str) -> NVDApi:
pass

@abstractmethod
def _create_nvd_results(self) -> NVDResults[T]:
async def _create_nvd_results(self) -> NVDResults[T]:
"""
Callback used during `fetch_initial_data` to get
the `NVDResults` object the SCAP items will be fetched from.
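Making _create_nvd_results an async abstract method matches the concrete overrides, which are coroutines; with a plain def in the base class, mypy reports the overrides as incompatible with the supertype. A minimal sketch of the corrected shape, with hypothetical names:

import asyncio
from abc import ABC, abstractmethod


class BaseProducer(ABC):
    @abstractmethod
    async def _create_results(self) -> list[int]:
        # Declared async so coroutine overrides are signature-compatible.
        ...


class ApiProducer(BaseProducer):
    async def _create_results(self) -> list[int]:
        return [1, 2, 3]


print(asyncio.run(ApiProducer()._create_results()))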