Merge pull request #171 from jagilber/dev_30
Dev 30 sdl workflow and pipeline updates
dbucce authored Mar 11, 2024
2 parents 8bb710a + eaf09d8 commit e0f5304
Showing 8 changed files with 115 additions and 47 deletions.
11 changes: 11 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
- package-ecosystem: "nuget" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
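
The committed file only covers the `nuget` ecosystem. As a hedged illustration of the same mechanism, a second entry appended under the existing `updates:` list could keep GitHub Actions workflow dependencies current as well; `github-actions` is a documented Dependabot ecosystem value, but this block is a sketch rather than part of the committed file:

```yaml
# sketch only: an additional entry appended under the existing 'updates:' list
- package-ecosystem: "github-actions" # documented Dependabot ecosystem for workflow actions
  directory: "/"                      # Dependabot locates workflows under .github/workflows
  schedule:
    interval: "weekly"
```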
18 changes: 18 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,23 @@
# Change log

## 03/11/2024

- update kusto quickstart doc with modified image and minimum sas permissions
- update kusto quickstart doc with new exception message
- update kusto functions

## 03/07/2024

- upgrade GitHub Actions labeler to v5

## 02/07/2024

- add updates for SDL
- add dependabot config
- remove stale links in documentation
- update GitHub workflows for deprecated environment variables
- update prod and dev Azure Pipelines definitions for ADO

## 11/19/2023

- add support for .net8.0
68 changes: 37 additions & 31 deletions azure-pipelines-dev.yml
@@ -1,6 +1,6 @@
# used for internal ado builds

name: ADO CI CollectSFData
name: ADO CI CollectSFData-Dev

trigger:
branches:
@@ -15,24 +15,17 @@ variables:
project_root: .\src
start_time: $[format('{0}-{1:yyyy}{1:MM}{1:dd}-{1:HH}{1:mm}{1:ss}', variables['project_name'], pipeline.startTime)]
artifacts_drive: Z
artifacts_share_target: Z:\$(System.TeamProject)/$(System.DefinitionName)/$(System.JobId)
artifacts_storage_uri_fileshare: /artifacts
System.Debug: true
buildConfiguration: Release
Codeql.PublishDatabaseLog: true
# testing
Codeql.Cadence: 0
Codeql.LogLevel: 5
Codeql.Language: csharp,powershell
Codeql.PublishDatabase: true
artifacts_share_target: 'Z:\$(System.DefinitionName)\$(System.JobId)\$(start_time)'
system.debug: true
buildConfiguration: debug

steps:
- task: PowerShell@2
displayName: 'agent environment'
inputs:
targetType: 'inline'
script: |
[environment]::getenvironmentvariables().getenumerator()|sort Name
[environment]::getEnvironmentVariables().getEnumerator()|sort Name
dotnet --info
dotnet nuget locals all --clear
errorActionPreference: 'continue'
@@ -51,7 +44,12 @@
- task: CodeQL3000Init@0
inputs:
Enabled: true
AnalyzeInPipeline: true
AnalyzeInPipeline: false
PublishDatabase: true
PublishDatabaseLog: true
Language: csharp,powershell
Cadence: 72 # 72 hours default, use 0 for debug
LogLevel: 4

- task: PowerShell@2
displayName: 'dotnet build'
@@ -112,25 +110,31 @@ steps:
targetPath: $(System.ArtifactsDirectory)
artifactName: artifacts-$(start_time)-$(system.JobId)

## not working
- task: PowerShell@2
displayName: 'map artifacts drive'
displayName: 'copy artifacts'
inputs:
targetType: 'inline'
script: |
[environment]::getenvironmentvariables().getenumerator()|sort Name
write-host "Test-NetConnection -ComputerName $env:mapped_artifacts_storage_uri -Port 445 -informationLevel Detailed"
$connectTestResult = Test-NetConnection -ComputerName $env:mapped_artifacts_storage_uri -Port 445 -informationLevel Detailed
if ($connectTestResult.TcpTestSucceeded) {
# Save the password so the drive will persist on reboot
write-host "cmd.exe /C cmdkey /add:`"$env:mapped_artifacts_storage_uri/$env:artifacts_storage_uri_fileshare`" /user:`"$env:mapped_artifacts_user`" /pass:`"$env:mapped_artifacts_pass`""
cmd.exe /C "cmdkey /add:`"$env:mapped_artifacts_storage_uri/$env:artifacts_storage_uri_fileshare`" /user:`"$env:mapped_artifacts_user`" /pass:`"$env:mapped_artifacts_pass`""
# Mount the drive
New-PSDrive -Name $env:artifacts_drive -PSProvider FileSystem -Root "$env:mapped_artifacts_share" -Persist
[environment]::getEnvironmentVariables().getEnumerator()|sort Name
write-host "test-netConnection -computerName $env:mapped_artifacts_storage_uri -Port 445 -informationLevel detailed"
$connectTestResult = test-netConnection -computerName $env:mapped_artifacts_storage_uri -Port 445 -informationLevel detailed
if ($connectTestResult.tcpTestSucceeded) {
$securePassword = ConvertTo-SecureString -String $env:mapped_artifacts_pass -Force -AsPlainText
$credentials = [psCredential]::new($env:mapped_artifacts_user, $securePassword)
write-host "new-psDrive -name $env:artifacts_drive -psProvider fileSystem -root "$env:mapped_artifacts_share" -credential $credentials"
new-psDrive -name $env:artifacts_drive -psProvider fileSystem -root "$env:mapped_artifacts_share" -credential $credentials -scope global
} else {
Write-Error -Message "Unable to reach the Azure storage account via port 445. Check to make sure your organization or ISP is not blocking port 445, or use Azure P2S VPN, Azure S2S VPN, or Express Route to tunnel SMB traffic over a different port."
write-error -message "Unable to reach the azure storage account via port 445."
}
mkdir "$env:artifacts_share_target"
if(!(test-path $env:artifacts_share_target))
{
write-host "mkdir "$env:artifacts_share_target""
mkdir "$env:artifacts_share_target"
}
write-host "copy $(System.DefaultWorkingDirectory)/src/bin/$(buildConfiguration) "$env:artifacts_share_target" -recurse"
copy $(System.DefaultWorkingDirectory)/src/bin/$(buildConfiguration) "$env:artifacts_share_target" -recurse
write-host "copy $(System.ArtifactsDirectory) "$env:artifacts_share_target" -recurse"
copy $(System.ArtifactsDirectory) "$env:artifacts_share_target" -recurse
errorActionPreference: 'continue'
verbosePreference: 'continue'
debugPreference: 'continue'
Expand All @@ -140,8 +144,10 @@ steps:
mapped_artifacts_pass: $(artifacts_pass)
mapped_artifacts_share: $(artifacts_share)

- task: DownloadPipelineArtifact@2
inputs:
buildType: 'current'
artifactName: 'artifacts-$(start_time)-$(system.JobId)'
targetPath: '$(artifacts_share_target)'
# - task: DownloadPipelineArtifact@2
# inputs:
# buildType: 'current'
# artifactName: 'artifacts-$(start_time)-$(system.JobId)'
# targetPath: '$(artifacts_share_target)'
# allowPartiallySucceededBuilds: true
# allowFailedBuilds: true
25 changes: 11 additions & 14 deletions azure-pipelines.yml
@@ -6,6 +6,7 @@ trigger:
branches:
include:
- 'master'
- 'release*'

pool:
vmImage: 'windows-latest'
@@ -14,25 +15,16 @@ variables:
project_name: CollectSFData
project_root: .\src
start_time: $[format('{0}-{1:yyyy}{1:MM}{1:dd}-{1:HH}{1:mm}{1:ss}', variables['project_name'], pipeline.startTime)]
artifacts_drive: Z
artifacts_share_target: Z:\$(System.TeamProject)/$(System.DefinitionName)/$(System.JobId)
artifacts_storage_uri_fileshare: /artifacts
System.Debug: true
buildConfiguration: Release
Codeql.PublishDatabaseLog: true
# testing
Codeql.Cadence: 0
Codeql.LogLevel: 5
Codeql.Language: csharp,powershell
Codeql.PublishDatabase: true

system.debug: false
buildConfiguration: release

steps:
- task: PowerShell@2
displayName: 'agent environment'
inputs:
targetType: 'inline'
script: |
[environment]::getenvironmentvariables().getenumerator()|sort Name
[environment]::getEnvironmentVariables().getEnumerator()|sort Name
dotnet --info
dotnet nuget locals all --clear
errorActionPreference: 'continue'
@@ -51,7 +43,12 @@
- task: CodeQL3000Init@0
inputs:
Enabled: true
AnalyzeInPipeline: true
AnalyzeInPipeline: false
PublishDatabase: false
PublishDatabaseLog: false
Language: csharp,powershell
Cadence: 72 # 72 hours default, use 0 for debug
LogLevel: 4

- task: PowerShell@2
displayName: 'dotnet build'
36 changes: 36 additions & 0 deletions docs/kustoQuickStart.md
@@ -53,6 +53,21 @@ From Azure portal https://portal.azure.com navigate to service fabric cluster re

Once the correct storage account is identified, select 'Shared access signature' and then 'Generate SAS and connection string'. Copy the 'Blob service SAS URL' or 'Connection String'. This is the value that will be used for the CollectSFData argument 'SasKey' (an example is shown after the screenshots below).

Ensure at least the following permissions are selected:

- Allowed Services
- Blob
- Table

- Allowed Resource Types
- Service
- Container
- Object

- Allowed Permissions
- Read
- List

- ![](media/azure.portal.3.png)

- ![](media/azure.portal.4.png)
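
To make the 'SasKey' wiring above concrete, the following is a minimal, hypothetical collectsfdata.options.json fragment. 'SasKey' is the argument named above; 'GatherType' and 'CacheLocation' are other documented CollectSFData options, and every value shown is a placeholder (the sp=rl, ss=bt, and srt=sco parameters correspond to the minimum permissions listed above):

```json
{
  "GatherType": "trace",
  "SasKey": "https://<storageaccount>.blob.core.windows.net/?sv=...&ss=bt&srt=sco&sp=rl&sig=<signature>",
  "CacheLocation": "C:\\temp\\csfd"
}
```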
@@ -459,6 +474,27 @@ pause
```

4. Microsoft.Identity.Client.MsalServiceException.
Verify that the following configuration settings are correct:
- azureClientId
- azureTenantId
- azureClientSecret
- azureClientCertificate

.NET Framework builds use ADAL and .NET Core builds use MSAL.
If using .NET Framework (net462), use cmd.exe or powershell.exe to execute collectsfdata.exe.
If using .NET Core (net6.0+), use pwsh.exe (PowerShell Core) to execute collectsfdata.exe.

```text
Authenticate:exception: AggregateException:System.AggregateException: One or more errors occurred. ---> Microsoft.Identity.Client.MsalServiceException:
AADSTS50194: Application 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'(collectsfdata service fabric data collection) is not configured as a multi-tenant application.
Usage of the /common endpoint is not supported for such applications created after '10/15/2018'.
Use a tenant-specific endpoint or configure the application to be multi-tenant.
Trace ID: 952e918a-1414-4206-91bd-ad74eb1cbc00
Correlation ID: 56e0075d-cc4f-4974-a651-0b25e82faaee
Timestamp: 2024-03-08 22:05:00Z
```
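
As a minimal sketch of the settings listed above, the same client credentials can also be supplied in collectsfdata.options.json; the property names mirror the argument names (the PascalCase shown here is an assumption) and every value is a placeholder, with only one of the secret or certificate settings expected to be populated:

```json
{
  "AzureClientId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
  "AzureTenantId": "yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy",
  "AzureClientSecret": "<client secret, leave empty when using a certificate>",
  "AzureClientCertificate": "<certificate subject or thumbprint, leave empty when using a secret>"
}
```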

## reference

### kusto reference
Binary file modified docs/media/azure.portal.3.png
@@ -1,4 +1,4 @@
.create-or-alter function with (docstring = "[T:string] where T=table name, [A:string] where App=application table name. function to search service fabric sflogs for application related entries", folder = "sflogs/application")
.create-or-alter function with (docstring = "[T:string] where T=table name, [A:string] where App=application name. function to search service fabric sflogs for application related entries", folder = "sflogs/application")
TraceApplicationUpgrade(T:string, A:string) {
let extractPattern_Context1 = @'(ApplicationUpgradeContext\()([^\)]*)\)\[([^,]*),\s([^,]*),\s([^,]*),\s([^,]*),\s([^\s]*)\s=\s([^,]*),\s([^\s]*)\s=\s';
let extractPattern_Context2 = @'(Monitoring = action:)([^,]*),\swait:([^,]*),\sstable:([^,]*),\sretry:([^,]*),\sud:([^,]*),\soverall:([^,]*)';
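
For orientation, a hypothetical invocation of the function is shown below; both the trace table name and the application name are placeholders, since the docstring only states that T is a table name and A is an application name:

```kusto
// sketch only: both arguments are placeholder values
TraceApplicationUpgrade('trace_mycluster', 'fabric:/MyApp')
| take 100
```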
@@ -7,7 +7,7 @@
| extend fabric_error = tostring(extract_all(@"(?P<fabric_error>FABRIC_E\w+)", dynamic(['fabric_error']), Text))
| summarize count(fabric_error), First = arg_min(Timestamp, *), Last = arg_max(Timestamp, *) by fabric_error
| order by fabric_error asc
| extend exception_pattern = replace("'","",replace(",",@".*",trim('\"|\\[|\\]',fabric_error)))
| extend exception_pattern = replace("'","",replace(",",@".*",trim(@'[\"\[\]]*',fabric_error)))
| extend encoded_pattern = url_encode(strcat(T,'| where Text matches regex "',exception_pattern,'"'))
| extend link = strcat(baseUri, encoded_pattern)
| project fabric_error, Level, Count = count_fabric_error, ['First occurrence'] = tostring(First), ['Last occurrence'] = iif(First == Last, "", tostring(Last)), link
