Skip to content

Commit

Permalink
Update docs
Browse files Browse the repository at this point in the history
  • Loading branch information
ramanan-ravi committed Nov 6, 2024
1 parent 197b847 commit eb169a4
Show file tree
Hide file tree
Showing 329 changed files with 11,227 additions and 989 deletions.
2 changes: 1 addition & 1 deletion docs/docs/cloudscanner/aws.md
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ provider "aws" {

module "deepfence-cloud-scanner_example_single-account" {
source = "deepfence/cloud-scanner/aws//examples/single-account-ecs"
version = "0.6.0"
version = "0.9.0"
name = "deepfence-cloud-scanner"
# mgmt-console-url: deepfence.customer.com or 22.33.44.55
mgmt-console-url = "<Console URL>"
Expand Down
12 changes: 6 additions & 6 deletions docs/docs/cloudscanner/azure.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ provider "azurerm" {
module "cloud-scanner_example_single-subscription" {
source = "deepfence/cloud-scanner/azure//examples/single-subscription"
version = "0.6.0"
version = "0.9.0"
mgmt-console-url = "<Console URL> eg. XXX.XXX.XX.XXX"
mgmt-console-port = "443"
deepfence-key = "<Deepfence-key> eg. XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
Expand Down Expand Up @@ -59,7 +59,7 @@ provider "azurerm" {
module "cloud-scanner_example_tenant-subscriptions" {
source = "deepfence/cloud-scanner/azure//examples/tenant-subscriptions"
version = "0.6.0"
version = "0.9.0"
mgmt-console-url = "<Console URL> eg. XXX.XXX.XX.XXX"
mgmt-console-port = "<Console port> eg. 443"
deepfence-key = "<Deepfence-key> eg. XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
Expand Down Expand Up @@ -123,7 +123,7 @@ data "azurerm_kubernetes_cluster" "default" {
module "cloud-scanner" {
source = "deepfence/cloud-scanner/azure//examples/aks"
version = "0.7.0"
version = "0.9.0"
name = "<name of the app>"
mgmt-console-url = "<Console URL> eg. XXX.XXX.XX.XXX"
deepfence-key = "<DEEPFENCE API KEY>"
Expand Down Expand Up @@ -158,7 +158,7 @@ data "azurerm_kubernetes_cluster" "default" {
module "test" {
source = "deepfence/cloud-scanner/azure//examples/aks"
version = "0.7.0"
version = "0.9.0"
name = "<name of the app>"
mgmt-console-url = "<Console URL> eg. XXX.XXX.XX.XXX"
deepfence-key = "<DEEPFENCE API KEY>"
Expand Down Expand Up @@ -204,7 +204,7 @@ module "test" {
module "infrastructure_cloud-scanner-app" {
source = "deepfence/cloud-scanner/azure//modules/infrastructure/cloud-scanner-app"
version = "0.7.0"
version = "0.9.0"
name = "deepfence-cloud-scanner"
subscription_ids_access = [data.azurerm_subscription.current.subscription_id]
}
Expand Down Expand Up @@ -235,7 +235,7 @@ module "test" {
module "infrastructure_cloud-scanner-app" {
source = "deepfence/cloud-scanner/azure//modules/infrastructure/cloud-scanner-app"
version = "0.7.0"
version = "0.9.0"
name = "deepfence-cloud-scanner"
subscription_ids_access = [list of tenant subscriptions ids]
}
Expand Down
14 changes: 7 additions & 7 deletions docs/docs/cloudscanner/gcp.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ Copy and paste the following (single project or multiple projects) into a new fi
```terraform
module "cloud-scanner_example_single-project" {
source = "deepfence/cloud-scanner/gcp//examples/single-project"
version = "0.6.0"
version = "0.9.0"
name = "deepfence-cloud-scanner"
# mgmt-console-url: deepfence.customer.com or 22.33.44.55
mgmt-console-url = "<Console URL>"
Expand Down Expand Up @@ -53,7 +53,7 @@ module "cloud-scanner_example_single-project" {
```terraform
module "cloud-scanner_example_multiple-projects" {
source = "deepfence/cloud-scanner/gcp//examples/multi-project"
version = "0.6.0"
version = "0.9.0"
name = "deepfence-cloud-scanner"
# org_domain: root project name
org_domain = ""
Expand Down Expand Up @@ -128,7 +128,7 @@ data "google_container_cluster" "target_cluster" {
module "cloud_scanner_example_single_project" {
source = "deepfence/cloud-scanner/gcp//examples/gke"
version = "0.7.2"
version = "0.9.0"
gke_host = "https://${data.google_container_cluster.target_cluster.endpoint}"
gke_token = data.google_client_config.current.access_token
gke_cluster_ca_certificate = base64decode(data.google_container_cluster.target_cluster.master_auth[0].cluster_ca_certificate,)
Expand Down Expand Up @@ -164,7 +164,7 @@ data "google_container_cluster" "target_cluster" {
module "cloud_scanner_example_multiple_project" {
source = "deepfence/cloud-scanner/gcp//examples/gke"
version = "0.7.2"
version = "0.9.0"
name = "deepfence-cloud-scanner"
gke_host = "https://${data.google_container_cluster.target_cluster.endpoint}"
gke_token = data.google_client_config.current.access_token
Expand Down Expand Up @@ -214,7 +214,7 @@ module "cloud_scanner_example_multiple_project" {
module "cloud_scanner_example_single_project" {
source = "deepfence/cloud-scanner/gcp//examples/gce-vm"
version = "0.7.2"
version = "0.9.0"
# gcp service account name
name = "deepfence-cloud-scanner"
# project_id example: dev1-123456
Expand All @@ -233,7 +233,7 @@ module "cloud_scanner_example_multiple_project" {
module "cloud_scanner_example_multiple_project" {
source = "deepfence/cloud-scanner/gcp//examples/gce-vm"
version = "0.7.2"
version = "0.9.0"
# gcp service account name
name = "deepfence-cloud-scanner"
# project_id example: dev1-123456
Expand Down Expand Up @@ -277,7 +277,7 @@ module "cloud_scanner_example_multiple_project" {
SUCCESS_SIGNAL_URL: ""
DF_LOG_LEVEL: info
SCAN_INACTIVE_THRESHOLD: "21600"
CLOUD_SCANNER_POLICY: "arn:aws:iam::aws:policy/SecurityAudit"
CLOUD_SCANNER_POLICY: ""
```
6. Start the cloud scanner using docker compose
```
Expand Down
4 changes: 2 additions & 2 deletions docs/docs/console/docker.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,10 @@ You can install the Management Console on a single Docker host or [in a dedicate

The following instructions explain how to get started with a docker-based installation on a single host system:

1. Download the file [docker-compose.yml](https://github.com/deepfence/ThreatMapper/blob/release-2.3/deployment-scripts/docker-compose.yml) to the system that will host the Console
1. Download the file [docker-compose.yml](https://github.com/deepfence/ThreatMapper/blob/release-2.4/deployment-scripts/docker-compose.yml) to the system that will host the Console

```bash
wget https://github.com/deepfence/ThreatMapper/raw/release-3.0/deployment-scripts/docker-compose.yml
wget https://github.com/deepfence/ThreatMapper/raw/release-2.4/deployment-scripts/docker-compose.yml
```

2. Execute the following command to install and start the latest build of the Console
Expand Down
2 changes: 1 addition & 1 deletion docs/docs/console/upgrade-from-v2.1.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ title: Upgrade from v2.1
Please choose upgrade steps by console deployment type (docker or kubernetes)

### Prerequisite
1. Download [pre-upgrade-to-v5.sh](https://github.com/deepfence/ThreatMapper/blob/release-2.3/deepfence_neo4j/pre-upgrade-to-v5.sh) script to current directory
1. Download [pre-upgrade-to-v5.sh](https://github.com/deepfence/ThreatMapper/blob/release-2.4/deepfence_neo4j/pre-upgrade-to-v5.sh) script to current directory
2. Make `pre-upgrade-to-v5.sh` executable
```
chmod +x pre-upgrade-to-v5.sh
Expand Down
4 changes: 2 additions & 2 deletions docs/docs/developers/deploy-console.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,10 @@ You should first [build the management console](build) and push the images to a
Refer to the [Docker Installation Instructions](/docs/console/docker) along with the modifications below.
:::

1. Download the file [docker-compose.yml](https://github.com/deepfence/ThreatMapper/blob/release-2.3/deployment-scripts/docker-compose.yml) to the system that will host the Console
1. Download the file [docker-compose.yml](https://github.com/deepfence/ThreatMapper/blob/release-2.4/deployment-scripts/docker-compose.yml) to the system that will host the Console

```bash
wget https://github.com/deepfence/ThreatMapper/raw/release-3.0/deployment-scripts/docker-compose.yml
wget https://github.com/deepfence/ThreatMapper/raw/release-2.4/deployment-scripts/docker-compose.yml
```

2. Execute the following command to install and start the Console. Note the override to specify your repository `myorg`, rather than the `deepfenceio` default:
Expand Down
10 changes: 10 additions & 0 deletions docs/docusaurus.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,17 @@ const config = {
lastVersion: 'current',
versions: {
"current": {
label: 'v2.5',
banner: 'none',
},
"v2.4": {
label: 'v2.4',
path: 'v2.4',
banner: 'none',
},
"v2.3": {
label: 'v2.3',
path: 'v2.3',
banner: 'none',
},
"v2.2": {
Expand Down
6 changes: 3 additions & 3 deletions docs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/core": "^3.6.0",
"@docusaurus/preset-classic": "^3.6.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.1.0",
"prism-react-renderer": "^2.1.0",
Expand All @@ -24,7 +24,7 @@
"react-player": "^2.15.1"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.4.0"
"@docusaurus/module-type-aliases": "^3.6.0"
},
"browserslist": {
"production": [
Expand Down
40 changes: 40 additions & 0 deletions docs/versioned_docs/version-v2.3/architecture/cloudscanner.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
---
title: Cloud Scanner task
---

# Cloud Scanner Overview

ThreatMapper performs Compliance Posture Scanning to:

* Build an inventory of cloud assets, such as network security groups, storage objects, and key management services. The types of assets discovered are specific to each cloud platform.
* Perform 'posture scanning', where ThreatMapper matches infrastructure configuration against a set of best-practice benchmarks, such as CIS, PCI-DSS and HIPAA. The benchmarks that are supported are specific to each cloud platform.

ThreatMapper then summarises the results in a 'Threat Graph' visualization, to help you to prioritize compliance issues that pose the greatest risk of exploit.

## Implementation

* Compliance Posture Scanning for **Clouds** requires access (typically read-only) to the cloud platform APIs, and uses the Cloud Scanner task.
* Compliance Posture Scanning for **Hosts** requires direct access to the host, and uses the Sensor Agent.

### Compliance Posture Scanning for Clouds

The ThreatMapper Console does not access the cloud platform APIs directly; there is no need to open the APIs up for remote access. Instead, you deploy a 'Cloud Scanner' task which acts as a local relay, taking instructions from the remote ThreatMapper console and performing local API calls from within your cloud infrastructure.

Each Cloud Scanner task runs in your cloud environment, gathering inventory and compliance information for the assets deployed in that environment. It submits that information to your ThreatMapper console. You can deploy as many Cloud Scanner tasks as are required by your security policy and any restrictions in place that affect API access.

Cloud Scanner tasks are deployed using the appropriate Terraform module for each cloud, and are configured with the address and API key of your management console. They 'phone home' to your management console and take instructions on demand; they do not listen for remote connections or control.

:::info
Refer to the Installation Documentation to [Learn how to install Cloud Scanner tasks](/docs/cloudscanner)
:::


### Compliance Posture Scanning for Hosts

ThreatMapper can perform compliance posture scanning on Linux hosts and Kubernetes master and worker nodes.

Scanning is done directly, using a local [Sensor Agent](sensors) rather than by using the Cloud Scanner task employed by the cloud platform integrations.




38 changes: 38 additions & 0 deletions docs/versioned_docs/version-v2.3/architecture/console.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
---
title: Management Console
---

# The ThreatMapper Management Console

The ThreatMapper Management Console ("Console") is a standalone application, implemented as a fleet of containers. It should be deployed on either a single docker host, or (for larger deployments) a dedicated Kubernetes cluster. The console is self-contained, and exposes an HTTPS interface for administration and API automation.

The console allows you to:

* Manage the users who can access the console.
* Configure Infrastructure API access and interrogate platform configurations.
* Visualize and drill down into Kubernetes clusters, virtual machines, containers and images, running processes, and network connections in near real time.
* Invoke vulnerability scans on running containers and applications and review the results, ranked by risk-of-exploit.
* Invoke compliance scans on infrastructure configuration ('agentless') and on infrastructure hosts ('agent-based'), manually or automatically when they are added to a cluster.
* Scan container registries for vulnerabilities, to review workloads before they are deployed.
* Scan image builds during the CI/CD pipeline, supporting CircleCI, Jenkins, and GitLab.
* Scan containers and host filesystems for unprotected secrets, including access tokens, keys and passwords.
* Configure integrations with external notification, SIEM and ticketing systems, including Slack, PagerDuty, Jira, Splunk, ELK, Sumo Logic, and AWS S3.

ThreatMapper supports multiple production deployments simultaneously, so that you can visualize and scan workloads across a large production estate.

### ThreatMapper Compliance Posture Scanning

ThreatMapper performs compliance posture scanning for cloud platforms by querying the infrastructure APIs for these platforms.

This is achieved using a **cloud scanner** task that is deployed within each cloud instance using a terraform module. The cloud scanner is granted appropriate access to the local APIs, and operates under instruction from the remote ThreatMapper console.

### ThreatMapper Registry Scanning

The ThreatMapper console can scan container images at rest in a wide range of supported registries.

This is achieved by providing appropriate credentials to the ThreatMapper console so that it can discover and download assets directly from these registries.

### ThreatMapper Vulnerability, Secret and Local Compliance Scanning

ThreatMapper performs vulnerability and secret scanning directly on production and non-production hosts using a **sensor agent** container.

The sensor agent is also used for local compliance scanning (Kubernetes and Linux posture) where it has access to configuration and assets that are not exposed through an API.
28 changes: 28 additions & 0 deletions docs/versioned_docs/version-v2.3/architecture/index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
---
title: ThreatMapper Architecture
---

# ThreatMapper Architecture

The ThreatMapper product consists of a Management Console, and multiple Sensor Agents and Cloud Scanner tasks that are deployed within your production platform(s).

![ThreatMapper Components](../img/threatmapper-components.jpg)

The Management Console is deployed first. The Management console generates an API key and a URL which you will need when you install the Sensor containers and Cloud Scanner tasks.

The Management Console is managed over TLS (port 443), used for administrative traffic (web browser and API) and for sensor traffic. You should firewall or secure access to this port so that only authorised admin users and remote production platforms are able to connect.

# Agent-Less and Agent-Based operation

ThreatMapper uses both agent-less and agent-based operations to discover the widest-possible range of threats and render them in 'Threat Graphs' and 'Threat Maps'. You can use either or both operations, and can configure their access to your production and non-production systems in line with your own security posture.

| | Agent-Less (Cloud Connector) | Agent-Based (Sensor Agent) |
|----------------|----------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------|
| Implementation | Direct access to infrastructure APIs, using a secured **Cloud Connector** task, deployed local to the cloud instance | Lightweight, privileged **sensor agent** container with access to local resources on the host |
| Visibility | Cloud configuration and assets, as exposed through cloud or infrastructure API | Local assets, including filesystem, process list, local containers and pods, and kernel interfaces |
| Capability | Identifies deviation from good practice configuration ("Compliance Scanning") for cloud platforms | Identifies network flows and performs vulnerability, secret and local host (Linux/Kubernetes) compliance scanning |
| Output | Agent-less data is reported in the 'Threat Graph', which shows compliance-related issues | Agent-based data is reported in the Threat Map (for vulnerabilities, secrets etc) and Threat Graph (for compliance issues) |




18 changes: 18 additions & 0 deletions docs/versioned_docs/version-v2.3/architecture/sensors.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
---
title: Sensor Agent
---

# Sensor Agent

ThreatMapper Sensors are deployed on your production platforms, directly on each production host. They are deployed in the form of a privileged container (the 'Sensor Agent'). They communicate securely with your ThreatMapper Management Console, taking instructions to retrieve SBOMs and run scans, and forwarding telemetry data.

The sensors support the following production platforms:

* **Kubernetes:** The sensors are deployed as a daemonset, similar to other kubernetes services.
* **Docker:** The sensor is deployed as a docker container on each docker host.
* **Bare metal and VM-based platforms:** Sensors are deployed as a Docker container on each Linux operating system instance, using a Docker runtime. Linux instances are supported; Windows Server is not supported, although an experimental implementation is available.
* **AWS Fargate:** The sensor is deployed as a daemon service alongside each serverless instance.

:::info
Refer to the Installation Documentation to [Learn how to install Sensor Agents](/docs/sensors)
:::
Loading

0 comments on commit eb169a4

Please sign in to comment.