Skip to content

Commit

Permalink
Merge pull request #2319 from alan-turing-institute/file_logs
Browse files Browse the repository at this point in the history
Add logging for file shares
  • Loading branch information
JimMadge authored Nov 29, 2024
2 parents 235d89f + d73704a commit c7d775c
Show file tree
Hide file tree
Showing 3 changed files with 108 additions and 14 deletions.
14 changes: 0 additions & 14 deletions data_safe_haven/infrastructure/programs/declarative_sre.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,20 +198,6 @@ def __call__(self) -> None:
tags=self.tags,
)

# Deploy monitoring
monitoring = SREMonitoringComponent(
"sre_monitoring",
self.stack_name,
SREMonitoringProps(
dns_private_zones=dns.private_zones,
location=self.config.azure.location,
resource_group_name=resource_group.name,
subnet=networking.subnet_monitoring,
timezone=self.config.sre.timezone,
),
tags=self.tags,
)

# Deploy data storage
data = SREDataComponent(
"sre_data",
Expand Down
79 changes: 79 additions & 0 deletions data_safe_haven/infrastructure/programs/sre/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from pulumi import ComponentResource, Input, Output, ResourceOptions
from pulumi_azure_native import (
authorization,
insights,
keyvault,
managedidentity,
network,
Expand Down Expand Up @@ -425,6 +426,45 @@ def __init__(
resource_group_name=kwargs["resource_group_name"],
)
)
# Add diagnostic setting for files
insights.DiagnosticSetting(
f"{storage_account_data_configuration._name}_diagnostic_setting",
name=f"{storage_account_data_configuration._name}_diagnostic_setting",
log_analytics_destination_type="Dedicated",
logs=[
{
"category_group": "allLogs",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
},
{
"category_group": "audit",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
},
],
metrics=[
{
"category": "Transaction",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
}
],
# This is the URI of the automatically created fileService resource
resource_uri=Output.concat(
storage_account_data_configuration.id, "/fileServices/default"
),
workspace_id=props.log_analytics_workspace.id,
)
# Set up a private endpoint for the configuration data storage account
storage_account_data_configuration_private_endpoint = network.PrivateEndpoint(
f"{storage_account_data_configuration._name}_private_endpoint",
Expand Down Expand Up @@ -625,6 +665,45 @@ def __init__(
opts=child_opts,
tags=child_tags,
)
# Add diagnostic setting for files
insights.DiagnosticSetting(
f"{storage_account_data_private_user._name}_diagnostic_setting",
name=f"{storage_account_data_private_user._name}_diagnostic_setting",
log_analytics_destination_type="Dedicated",
logs=[
{
"category_group": "allLogs",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
},
{
"category_group": "audit",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
},
],
metrics=[
{
"category": "Transaction",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
}
],
# This is the URI of the automatically created fileService resource
resource_uri=Output.concat(
storage_account_data_private_user.id, "/fileServices/default"
),
workspace_id=props.log_analytics_workspace.id,
)
storage.FileShare(
f"{storage_account_data_private_user._name}_files_home",
access_tier=storage.ShareAccessTier.PREMIUM,
Expand Down
29 changes: 29 additions & 0 deletions docs/source/management/logs.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,35 @@ There are two tables,
: Various metrics on blob container utilisation and performance.
: This table is not reserved for the desired state data container and other resources may log to it.

### User data logs

The user data file share holds the {ref}`researchers'<role_researcher>` [home directories](https://refspecs.linuxfoundation.org/FHS_3.0/fhs/ch03s08.html), where they will store their personal data and configuration.
Logs from the share are ingested into the [SRE's log analytics workspace](#log-workspace).
There are two tables,

`StorageFileLogs`
: NFS events occurring on the file share.
: For example, data being written or directories being accessed.

`AzureMetrics`
: Various metrics on file share utilisation and performance.
: This table is not reserved for the user data share and other resources may log to it.

### Configuration data logs

There are multiple configuration data file shares.
Each contains the configuration and state data for the Data Safe Haven [services deployed as containers](#container-logs).
Logs from the share are ingested into the [SRE's log analytics workspace](#log-workspace).
There are two tables,

`StorageFileLogs`
: SMB events occurring on the file share.
: For example, data being written or directories being accessed.

`AzureMetrics`
: Various metrics on file share utilisation and performance.
: This table is not reserved for the configuration data shares and other resources may log to it.

## Container logs

Some of the Data Safe Haven infrastructure is provisioned as containers.
Expand Down

0 comments on commit c7d775c

Please sign in to comment.