diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
new file mode 100644
index 00000000..6b93cca8
--- /dev/null
+++ b/.config/dotnet-tools.json
@@ -0,0 +1,12 @@
+{
+  "version": 1,
+  "isRoot": true,
+  "tools": {
+    "dotnet-ef": {
+      "version": "7.0.3",
+      "commands": [
+        "dotnet-ef"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/.dockerignore b/.dockerignore
index b6900a6f..c06bf04e 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -16,12 +16,33 @@
 **/Gemfile.lock
 !updater/Gemfile.lock
 !updater/spec/fixtures/**/Gemfile.lock
-**/node_modules
 !**/spec/fixtures/*
 git.store
 .DS_Store
 *.pyc
-.dockerignore
-Dockerfile*
 *.md
 CODEOWNERS
+
+**/.classpath
+**/.dockerignore
+**/.env
+**/.git
+**/.gitignore
+**/.project
+**/.settings
+**/.toolstarget
+**/.vs
+**/.vscode
+**/*.*proj.user
+**/*.dbmdl
+**/*.jfm
+**/azds.yaml
+**/bin
+**/charts
+**/docker-compose*
+**/Dockerfile*
+**/node_modules
+**/npm-debug.log
+**/obj
+**/secrets.dev.yaml
+**/values.dev.yaml
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index b29fdb6f..a8c45d85 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -26,6 +26,22 @@ updates:
     time: "04:00"
   open-pull-requests-limit: 10
 
+- package-ecosystem: "nuget" # See documentation for possible values
+  directory: "/" # Location of package manifests
+  schedule:
+    interval: "weekly"
+    time: "02:00"
+  open-pull-requests-limit: 10
+  ignore:
+    - dependency-name: "Microsoft.*"
+      update-types: ["version-update:semver-patch"]
+    - dependency-name: "System.*"
+      update-types: ["version-update:semver-patch"]
+    - dependency-name: "Azure.*"
+      update-types: ["version-update:semver-patch"]
+    - dependency-name: "Moq"
+      update-types: ["version-update:semver-patch"]
+
 - package-ecosystem: "npm" # See documentation for possible values
   directory: "/extension" # Location of package manifests
   schedule:
@@ -39,3 +55,9 @@ updates:
       update-types: ["version-update:semver-patch"]
     - dependency-name: "@types/node"
       update-types: ["version-update:semver-patch"]
+
+registries:
+  tingle:
+    type: nuget-feed
+    url: 'https://pkgs.dev.azure.com/tingle/_packaging/tingle/nuget/v3/index.json'
+    token: '${{ secrets.TINGLE_NUGET_FEED_KEY }}'
diff --git a/.github/workflows/cleanup.yml b/.github/workflows/cleanup.yml
index 68808f3f..6b620427 100644
--- a/.github/workflows/cleanup.yml
+++ b/.github/workflows/cleanup.yml
@@ -12,6 +12,7 @@ jobs:
       matrix:
         package-name:
         - "dependabot-updater"
+        - "dependabot-server"
 
     runs-on: ubuntu-latest
 
@@ -24,3 +25,4 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}
           package-name: ${{ matrix.package-name }}
           min-versions-to-keep: 20
+          delete-only-pre-release-versions: true
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 1c444995..69461613 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -32,7 +32,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: [ 'javascript', 'ruby' ]
+        language: [ 'javascript', 'ruby', 'csharp' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Use only 'java' to analyze code written in Java, Kotlin or both
        # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
@@ -54,6 +54,12 @@ jobs:
        # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
        # queries: security-extended,security-and-quality
 
+    - name: Setup NuGet Private Feed
+      if: matrix.language == 'csharp'
+      run: |
+        dotnet nuget remove source tingle
+        dotnet nuget add source ${{ secrets.PRIVATE_FEED_URL }} --name tingle --username az --password ${{ secrets.PRIVATE_FEED_API_KEY }} --store-password-in-clear-text
+        dotnet restore
 
    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 195c3797..380482f5 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -73,7 +73,7 @@ jobs:
           --label com.github.image.source.sha=$GITHUB_SHA \
           --label com.github.image.source.branch=$GITHUB_REF \
           -t "ghcr.io/tinglesoftware/$IMAGE_NAME:latest" \
-          -t "ghcr.io/tinglesoftware/$IMAGE_NAME:$GITVERSION_FULLSEMVER" \
+          -t "ghcr.io/tinglesoftware/$IMAGE_NAME:$GITVERSION_NUGETVERSIONV2" \
           -t "ghcr.io/tinglesoftware/$IMAGE_NAME:$GITVERSION_MAJOR.$GITVERSION_MINOR" \
           -t "ghcr.io/tinglesoftware/$IMAGE_NAME:$GITVERSION_MAJOR" \
           --cache-from ghcr.io/tinglesoftware/$IMAGE_NAME:latest \
@@ -88,9 +88,9 @@ jobs:
       if: github.ref == 'refs/heads/main'
       run: docker push "ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME:latest"
 
-    - name: Push image (FullSemVer)
+    - name: Push image (NuGetVersionV2)
       if: "!startsWith(github.ref, 'refs/pull')"
-      run: docker push "ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME:$GITVERSION_FULLSEMVER"
+      run: docker push "ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME:$GITVERSION_NUGETVERSIONV2"
 
     - name: Push image (major, minor)
       if: startsWith(github.ref, 'refs/tags')
diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml
new file mode 100644
index 00000000..4b315268
--- /dev/null
+++ b/.github/workflows/server.yml
@@ -0,0 +1,131 @@
+name: Server
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      - '*'
+    paths:
+      - "server/**"
+      - ".github/workflows/server.yml"
+      - "!docs/**"
+  pull_request:
+    branches:
+      # Only trigger for PRs against `main` branch.
+      - main
+    paths:
+      - "server/**"
+      - ".github/workflows/server.yml"
+      - "!docs/**"
+
+jobs:
+  Build:
+    runs-on: ubuntu-latest
+    env:
+      buildConfiguration: 'Release'
+      DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
+      IMAGE_NAME: 'dependabot-server'
+      DOCKER_BUILDKIT: 1 # Enable Docker BuildKit
+
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v3
+      with:
+        fetch-depth: 0 # Required for GitVersion
+
+    - name: Install GitVersion
+      uses: gittools/actions/gitversion/setup@v0
+      with:
+        versionSpec: '5.x'
+
+    - name: Determine Version
+      uses: gittools/actions/gitversion/execute@v0
+      with:
+        useConfigFile: true
+
+    - name: Setup .NET SDK
+      uses: actions/setup-dotnet@v3
+      with:
+        dotnet-version: '7.x'
+        source-url: ${{ secrets.PRIVATE_FEED_URL }}
+      env:
+        NUGET_AUTH_TOKEN: ${{ secrets.PRIVATE_FEED_API_KEY }}
+
+    - name: Test
+      run: dotnet test -c $buildConfiguration --verbosity normal --collect "Code coverage"
+
+    - name: Publish
+      run: |
+        dotnet publish \
+          $GITHUB_WORKSPACE/server/Tingle.Dependabot/Tingle.Dependabot.csproj \
+          -c $buildConfiguration \
+          -o $GITHUB_WORKSPACE/drop/Tingle.Dependabot
+
+    - name: Replace tokens
+      uses: cschleiden/replace-tokens@v1
+      with:
+        files: '["${{ github.workspace }}/main.bicep"]'
+
+    - name: Build bicep file
+      uses: azure/CLI@v1
+      with:
+        inlineScript: |
+          cp $GITHUB_WORKSPACE/main.bicep $GITHUB_WORKSPACE/drop/main.bicep
+          az bicep build --file main.bicep --outfile $GITHUB_WORKSPACE/drop/main.json
+
+    - name: Pull Docker base image & warm Docker cache
+      run: docker pull "ghcr.io/tinglesoftware/$IMAGE_NAME:latest"
+      continue-on-error: true # TODO: remove after the first run
+
+    - name: Build image
+      run: |
+        docker build \
+          -f server/Tingle.Dependabot/Dockerfile.CI \
+          --label com.github.image.run.id=$GITHUB_RUN_ID \
+          --label com.github.image.run.number=$GITHUB_RUN_NUMBER \
+          --label com.github.image.job.id=$GITHUB_JOB \
+          --label com.github.image.source.sha=$GITHUB_SHA \
+          --label com.github.image.source.branch=$GITHUB_REF \
+          -t "ghcr.io/tinglesoftware/$IMAGE_NAME:latest" \
+          -t "ghcr.io/tinglesoftware/$IMAGE_NAME:$GITVERSION_NUGETVERSIONV2" \
+          -t "ghcr.io/tinglesoftware/$IMAGE_NAME:$GITVERSION_MAJOR.$GITVERSION_MINOR" \
+          -t "ghcr.io/tinglesoftware/$IMAGE_NAME:$GITVERSION_MAJOR" \
+          --cache-from ghcr.io/tinglesoftware/$IMAGE_NAME:latest \
+          --build-arg BUILDKIT_INLINE_CACHE=1 \
+          $GITHUB_WORKSPACE/drop/Tingle.Dependabot
+
+    - name: Log into registry
+      if: ${{ (github.ref == 'refs/heads/main') || (!startsWith(github.ref, 'refs/pull')) || startsWith(github.ref, 'refs/tags') }}
+      run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
+
+    - name: Push image (latest)
+      if: github.ref == 'refs/heads/main'
+      run: docker push "ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME:latest"
+
+    - name: Push image (NuGetVersionV2)
+      if: "!startsWith(github.ref, 'refs/pull')"
+      run: docker push "ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME:$GITVERSION_NUGETVERSIONV2"
+
+    - name: Push image (major, minor)
+      if: startsWith(github.ref, 'refs/tags')
+      run: |
+        docker push "ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME:$GITVERSION_MAJOR.$GITVERSION_MINOR"
+        docker push "ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME:$GITVERSION_MAJOR"
+
+    - name: Publish Artifact
+      uses: actions/upload-artifact@v3
+      with:
+        path: ${{ github.workspace }}/drop/*
+        name: drop
+
+    - name: Upload Release
+      if: startsWith(github.ref, 'refs/tags/')
+      uses: ncipollo/release-action@v1
+      with:
+        artifacts: >
+          ${{ github.workspace
}}/drop/main.bicep, + ${{ github.workspace }}/drop/main.json + token: ${{ secrets.GITHUB_TOKEN }} + draft: true + allowUpdates: true diff --git a/.gitignore b/.gitignore index 19bd5ff9..398ab060 100644 --- a/.gitignore +++ b/.gitignore @@ -1,35 +1,362 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- Backup*.rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + /.bundle/ -/node_modules /.env /.envrc /tmp -/pkg -/dependabot-*.gem -!bundler/spec/fixtures/projects/**/Gemfile.lock Gemfile.lock !updater/spec/fixtures/**/Gemfile.lock !updater/Gemfile.lock -vendor -!bundler/spec/fixtures/vendored_gems/vendor -!common/spec/fixtures/projects/**/*/vendor -!go_modules/spec/fixtures/projects/**/* -.DS_Store *.pyc -*git.store /.vscode/ /.vscode-server/ /.vscode-server-insiders/ -**/helpers/install-dir -/npm_and_yarn/helpers/node_modules -/npm_and_yarn/helpers/.node-version -/dry-run -**/bin/helper /.core-bash_history coverage/ -.ruby-gemset .tool-versions .rspec_status .rdbg_history local-*.sh local/**.sh +.DS_Store +*git.store .idea diff --git a/Tingle.Dependabot.sln b/Tingle.Dependabot.sln new file mode 100644 index 00000000..31f3f849 --- /dev/null +++ b/Tingle.Dependabot.sln @@ -0,0 +1,31 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.3.32901.215 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tingle.Dependabot", "server\Tingle.Dependabot\Tingle.Dependabot.csproj", "{F3BDB109-999B-4F3C-A6C5-745CC5BE4B68}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tingle.Dependabot.Tests", "server\Tingle.Dependabot.Tests\Tingle.Dependabot.Tests.csproj", "{BCDFC1A1-1FF4-4F26-9DEC-676CEC1418BF}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {F3BDB109-999B-4F3C-A6C5-745CC5BE4B68}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F3BDB109-999B-4F3C-A6C5-745CC5BE4B68}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F3BDB109-999B-4F3C-A6C5-745CC5BE4B68}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F3BDB109-999B-4F3C-A6C5-745CC5BE4B68}.Release|Any CPU.Build.0 = Release|Any CPU + {BCDFC1A1-1FF4-4F26-9DEC-676CEC1418BF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BCDFC1A1-1FF4-4F26-9DEC-676CEC1418BF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BCDFC1A1-1FF4-4F26-9DEC-676CEC1418BF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BCDFC1A1-1FF4-4F26-9DEC-676CEC1418BF}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = 
{BF632A04-CFA4-4DB6-AA7E-B20C71654109} + EndGlobalSection +EndGlobal diff --git a/main.bicep b/main.bicep new file mode 100644 index 00000000..fe784ab7 --- /dev/null +++ b/main.bicep @@ -0,0 +1,336 @@ +@description('Location for all resources.') +param location string = resourceGroup().location + +@description('Name of the resources') +param name string = 'dependabot' + +@description('Registry of the docker image. E.g. "contoso.azurecr.io". Leave empty unless you have a private registry mirroring the image from docker hub') +param dockerImageRegistry string = 'ghcr.io' + +@description('Registry and repository of the server docker image. Ideally, you do not need to edit this value.') +param serverImageRepository string = 'tinglesoftware/dependabot-server' + +@description('Tag of the server docker image.') +param serverImageTag string = '#{GITVERSION_NUGETVERSIONV2}#' + +@description('Registry and repository of the updater docker image. Ideally, you do not need to edit this value.') +param updaterImageRepository string = 'tinglesoftware/dependabot-updater' + +@description('Tag of the updater docker image.') +param updaterImageTag string = '#{GITVERSION_NUGETVERSIONV2}#' + +@allowed([ + 'ContainerInstances' + 'ContainerApps' +]) +@description('Where to host new update jobs.') +param jobHostType string = 'ContainerInstances' + +@description('Password for Webhooks, ServiceHooks, and Notifications from Azure DevOps.') +#disable-next-line secure-secrets-in-params // need sensible defaults +param notificationsPassword string = uniqueString('service-hooks', resourceGroup().id) // e.g. zecnx476et7xm (13 characters) + +@description('URL of the project. For example "https://dev.azure.com/fabrikam/DefaultCollection"') +param projectUrl string + +@description('Token for accessing the project.') +param projectToken string + +@description('Whether to synchronize repositories on startup.') +param synchronizeOnStartup bool = false + +@description('Whether to create or update subscriptions on startup.') +param createOrUpdateWebhooksOnStartup bool = false + +@description('Whether to set auto complete on created pull requests.') +param autoComplete bool = false + +@description('Identifiers of configs to be ignored in auto complete. E.g 3,4,10') +param autoCompleteIgnoreConfigs array = [] + +@allowed([ + 'NoFastForward' + 'Rebase' + 'RebaseMerge' + 'Squash' +]) +@description('Where to host new update jobs.') +param autoCompleteMergeStrategy string = 'Squash' + +@description('Whether to automatically approve created pull requests.') +param autoApprove bool = true + +@description('Access token for authenticating requests to GitHub.') +param githubToken string = '' + +@minValue(1) +@maxValue(2) +@description('The minimum number of replicas') +param minReplicas int = 1 // necessary for in-memory scheduling + +@minValue(1) +@maxValue(5) +@description('The maximum number of replicas') +param maxReplicas int = 1 + +var sqlServerAdministratorLogin = uniqueString(resourceGroup().id) // e.g. zecnx476et7xm (13 characters) +var sqlServerAdministratorLoginPassword = '${skip(uniqueString(resourceGroup().id), 5)}%${uniqueString('sql-password', resourceGroup().id)}' // e.g. abcde%zecnx476et7xm (19 characters) +var hasDockerImageRegistry = (dockerImageRegistry != null && !empty(dockerImageRegistry)) +// avoid conflicts across multiple deployments for resources that generate FQDN based on the name +var collisionSuffix = uniqueString(resourceGroup().id) // e.g. 
zecnx476et7xm (13 characters) + +/* Managed Identities */ +resource managedIdentity 'Microsoft.ManagedIdentity/userAssignedIdentities@2018-11-30' = { + name: name + location: location +} +resource managedIdentityJobs 'Microsoft.ManagedIdentity/userAssignedIdentities@2018-11-30' = { + name: '${name}-jobs' + location: location +} + +/* Service Bus namespace */ +resource serviceBusNamespace 'Microsoft.ServiceBus/namespaces@2021-11-01' = { + name: '${name}-${collisionSuffix}' + location: location + properties: { + disableLocalAuth: false + zoneRedundant: false + } + sku: { + name: 'Basic' + } +} + +/* SQL Server */ +resource sqlServer 'Microsoft.Sql/servers@2022-05-01-preview' = { + name: '${name}-${collisionSuffix}' + location: location + properties: { + publicNetworkAccess: 'Enabled' + administratorLogin: sqlServerAdministratorLogin + administratorLoginPassword: sqlServerAdministratorLoginPassword + primaryUserAssignedIdentityId: managedIdentity.id + restrictOutboundNetworkAccess: 'Disabled' + } + identity: { + type: 'UserAssigned' + userAssignedIdentities: { + '${managedIdentity.id}': { /*ttk bug*/} + } + } +} +resource sqlServerDatabase 'Microsoft.Sql/servers/databases@2022-05-01-preview' = { + parent: sqlServer + name: name + location: location + sku: { + name: 'Basic' + } + properties: { + collation: 'SQL_Latin1_General_CP1_CI_AS' + maxSizeBytes: 2147483648 + catalogCollation: 'SQL_Latin1_General_CP1_CI_AS' + zoneRedundant: false + readScale: 'Disabled' + requestedBackupStorageRedundancy: 'Geo' + isLedgerOn: false + } + identity: { + type: 'UserAssigned' + userAssignedIdentities: { + '${managedIdentity.id}': { /*ttk bug*/} + } + } +} + +/* LogAnalytics */ +resource logAnalyticsWorkspace 'Microsoft.OperationalInsights/workspaces@2022-10-01' = { + name: name + location: location + properties: { + sku: { + name: 'PerGB2018' + } + workspaceCapping: { + dailyQuotaGb: json('0.167') // low so as not to pass the 5GB limit per subscription + } + } +} + +/* Container App Environment */ +resource appEnvironment 'Microsoft.App/managedEnvironments@2022-06-01-preview' = { + name: name + location: location + properties: { + appLogsConfiguration: { + destination: 'log-analytics' + logAnalyticsConfiguration: { + customerId: logAnalyticsWorkspace.properties.customerId + sharedKey: logAnalyticsWorkspace.listKeys().primarySharedKey + } + } + zoneRedundant: false // enabling this requires a custom VNET with an intrastrucutre Subnet + } + sku: { + name: 'Consumption' + } +} + +/* Application Insights */ +resource appInsights 'Microsoft.Insights/components@2020-02-02' = { + name: name + location: location + kind: 'web' + properties: { + Application_Type: 'web' + // WorkspaceResourceId: logAnalyticsWorkspace.id + } +} + +/* Container App */ +resource app 'Microsoft.App/containerApps@2022-06-01-preview' = { + name: name + location: location + properties: { + managedEnvironmentId: appEnvironment.id + configuration: { + ingress: { + external: true + targetPort: 80 + traffic: [ + { + latestRevision: true + weight: 100 + } + ] + } + registries: hasDockerImageRegistry ? 
[ + { + identity: managedIdentity.id + server: dockerImageRegistry + } + ] : [] + secrets: [ + { name: 'connection-strings-application-insights', value: appInsights.properties.ConnectionString } + { + name: 'connection-strings-sql' + value: join([ + 'Server=tcp:${sqlServer.properties.fullyQualifiedDomainName},1433' + 'Initial Catalog=${sqlServerDatabase.name}' + 'User ID=${sqlServerAdministratorLogin}' + 'Password=${sqlServerAdministratorLoginPassword}' + 'Persist Security Info=False' + 'MultipleActiveResultSets=False' + 'Encrypt=True' + 'TrustServerCertificate=False' + 'Connection Timeout=30' + ], ';') + } + { name: 'notifications-password', value: notificationsPassword } + { name: 'project-token', value: projectToken } + { name: 'log-analytics-workspace-key', value: logAnalyticsWorkspace.listKeys().primarySharedKey } + ] + } + template: { + containers: [ + { + image: '${'${hasDockerImageRegistry ? '${dockerImageRegistry}/' : ''}'}${serverImageRepository}:${serverImageTag}' + name: 'dependabot' + env: [ + { name: 'AZURE_CLIENT_ID', value: managedIdentity.properties.clientId } // Specifies the User-Assigned Managed Identity to use. Without this, the app attempt to use the system assigned one. + { name: 'ASPNETCORE_FORWARDEDHEADERS_ENABLED', value: 'true' } // Application is behind proxy + { name: 'EFCORE_PERFORM_MIGRATIONS', value: 'true' } // Perform migrations on startup + + { name: 'ApplicationInsights__ConnectionString', secretRef: 'connection-strings-application-insights' } + { name: 'ConnectionStrings__Sql', secretRef: 'connection-strings-sql' } + + { name: 'Workflow__SynchronizeOnStartup', value: synchronizeOnStartup ? 'true' : 'false' } + { name: 'Workflow__LoadSchedulesOnStartup', value: 'true' } + { name: 'Workflow__CreateOrUpdateWebhooksOnStartup', value: createOrUpdateWebhooksOnStartup ? 'true' : 'false' } + { name: 'Workflow__ProjectUrl', value: projectUrl } + { name: 'Workflow__ProjectToken', secretRef: 'project-token' } + { name: 'Workflow__WebhookEndpoint', value: 'https://${name}.${appEnvironment.properties.defaultDomain}/webhooks/azure' } + { name: 'Workflow__ResourceGroupId', value: resourceGroup().id } + { name: 'Workflow__LogAnalyticsWorkspaceId', value: logAnalyticsWorkspace.properties.customerId } + { name: 'Workflow__LogAnalyticsWorkspaceKey', secretRef: 'log-analytics-workspace-key' } + { name: 'Workflow__ManagedIdentityId', value: managedIdentityJobs.id } + { name: 'Workflow__UpdaterContainerImage', value: '${'${hasDockerImageRegistry ? '${dockerImageRegistry}/' : ''}'}${updaterImageRepository}:${updaterImageTag}' } + { name: 'Workflow__AutoComplete', value: autoComplete ? 'true' : 'false' } + { name: 'Workflow__AutoCompleteIgnoreConfigs', value: join(autoCompleteIgnoreConfigs, ';') } + { name: 'Workflow__AutoCompleteMergeStrategy', value: autoCompleteMergeStrategy } + { name: 'Workflow__AutoApprove', value: autoApprove ? 
'true' : 'false' } + { name: 'Workflow__GithubToken', value: githubToken } + { name: 'Workflow__JobHostType', value: jobHostType } + { name: 'Workflow__Location', value: location } + + { + name: 'Authentication__Schemes__Management__Authority' + // Format: https://login.microsoftonline.com/{tenant-id}/v2.0 + value: '${environment().authentication.loginEndpoint}${subscription().tenantId}/v2.0' + } + { name: 'Authentication__Schemes__Management__ValidAudiences__0', value: 'https://${name}.${appEnvironment.properties.defaultDomain}' } + { name: 'Authentication__Schemes__ServiceHooks__Credentials__vsts', secretRef: 'notifications-password' } + + { name: 'EventBus__SelectedTransport', value: 'ServiceBus' } + { + name: 'EventBus__Transports__azure-service-bus__FullyQualifiedNamespace' + value: split(split(serviceBusNamespace.properties.serviceBusEndpoint, '/')[2], ':')[0] // manipulating https://{your-namespace}.servicebus.windows.net:443/ + } + ] + resources: { // these are the least resources we can provision + cpu: json('0.25') + memory: '0.5Gi' + } + } + ] + scale: { + minReplicas: minReplicas + maxReplicas: maxReplicas + } + } + } + identity: { + type: 'UserAssigned' + userAssignedIdentities: { + '${managedIdentity.id}': { /*ttk bug*/} + } + } +} + +/* Role Assignments */ +resource contributorRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = { // needed for creating jobs + name: guid(resourceGroup().id, 'managedIdentity', 'ContributorRoleAssignment') + scope: resourceGroup() + properties: { + roleDefinitionId: subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'b24988ac-6180-42a0-ab88-20f7382dd24c') + principalId: managedIdentity.properties.principalId + principalType: 'ServicePrincipal' + } +} +resource serviceBusDataOwnerRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = { + name: guid(resourceGroup().id, 'managedIdentity', 'AzureServiceBusDataOwner') + scope: resourceGroup() + properties: { + roleDefinitionId: subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '090c5cfd-751d-490a-894a-3ce6f1109419') + principalId: managedIdentity.properties.principalId + principalType: 'ServicePrincipal' + } +} +resource logAnalyticsReaderRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = { + name: guid(resourceGroup().id, 'managedIdentity', 'LogAnalyticsReader') + scope: resourceGroup() + properties: { + roleDefinitionId: subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '73c42c96-874c-492b-b04d-ab87d138a893') + principalId: managedIdentity.properties.principalId + principalType: 'ServicePrincipal' + } +} + +// output id string = app.id +// output fqdn string = app.properties.configuration.ingress.fqdn +#disable-next-line outputs-should-not-contain-secrets +output sqlServerAdministratorLoginPassword string = sqlServerAdministratorLoginPassword +output webhookEndpoint string = 'https://${name}.${appEnvironment.properties.defaultDomain}/webhooks/azure' +#disable-next-line outputs-should-not-contain-secrets +output notificationsPassword string = notificationsPassword diff --git a/nuget.config b/nuget.config new file mode 100644 index 00000000..d230270d --- /dev/null +++ b/nuget.config @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/server/.editorconfig b/server/.editorconfig new file mode 100644 index 00000000..531b7301 --- /dev/null +++ b/server/.editorconfig @@ -0,0 +1,8 @@ +# editorconfig.org + +# top-most EditorConfig file +root = true + +# CSharp formatting rules: +[*.cs] 
+csharp_style_namespace_declarations =file_scoped:warning \ No newline at end of file diff --git a/server/.gitattributes b/server/.gitattributes new file mode 100644 index 00000000..1ff0c423 --- /dev/null +++ b/server/.gitattributes @@ -0,0 +1,63 @@ +############################################################################### +# Set default behavior to automatically normalize line endings. +############################################################################### +* text=auto + +############################################################################### +# Set default behavior for command prompt diff. +# +# This is need for earlier builds of msysgit that does not have it on by +# default for csharp files. +# Note: This is only used by command line +############################################################################### +#*.cs diff=csharp + +############################################################################### +# Set the merge driver for project and solution files +# +# Merging from the command prompt will add diff markers to the files if there +# are conflicts (Merging from VS is not affected by the settings below, in VS +# the diff markers are never inserted). Diff markers may cause the following +# file extensions to fail to load in VS. An alternative would be to treat +# these files as binary and thus will always conflict and require user +# intervention with every merge. To do so, just uncomment the entries below +############################################################################### +#*.sln merge=binary +#*.csproj merge=binary +#*.vbproj merge=binary +#*.vcxproj merge=binary +#*.vcproj merge=binary +#*.dbproj merge=binary +#*.fsproj merge=binary +#*.lsproj merge=binary +#*.wixproj merge=binary +#*.modelproj merge=binary +#*.sqlproj merge=binary +#*.wwaproj merge=binary + +############################################################################### +# behavior for image files +# +# image files are treated as binary by default. +############################################################################### +#*.jpg binary +#*.png binary +#*.gif binary + +############################################################################### +# diff behavior for common document formats +# +# Convert binary document formats to text before diffing them. This feature +# is only available from the command line. Turn it on by uncommenting the +# entries below. +############################################################################### +#*.doc diff=astextplain +#*.DOC diff=astextplain +#*.docx diff=astextplain +#*.DOCX diff=astextplain +#*.dot diff=astextplain +#*.DOT diff=astextplain +#*.pdf diff=astextplain +#*.PDF diff=astextplain +#*.rtf diff=astextplain +#*.RTF diff=astextplain diff --git a/server/.gitignore b/server/.gitignore new file mode 100644 index 00000000..c102c522 --- /dev/null +++ b/server/.gitignore @@ -0,0 +1,344 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- Backup*.rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +.idea +.DS_Store +*git.store diff --git a/server/Tingle.Dependabot.Tests/AzureDevOpsEventHandlerTests.cs b/server/Tingle.Dependabot.Tests/AzureDevOpsEventHandlerTests.cs new file mode 100644 index 00000000..8280faa9 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/AzureDevOpsEventHandlerTests.cs @@ -0,0 +1,299 @@ +using AspNetCore.Authentication.Basic; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Http.Json; +using Microsoft.AspNetCore.TestHost; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using System.Net; +using System.Text; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.EventBus; +using Tingle.EventBus.Transports.InMemory; +using Xunit; +using Xunit.Abstractions; + +namespace Tingle.Dependabot.Tests; + +public class AzureDevOpsEventHandlerTests +{ + private readonly ITestOutputHelper outputHelper; + + public AzureDevOpsEventHandlerTests(ITestOutputHelper outputHelper) + { + this.outputHelper = outputHelper ?? 
throw new ArgumentNullException(nameof(outputHelper)); + } + + [Fact] + public async Task Returns_Unauthorized() + { + await TestAsync(async (harness, client, handler) => + { + // without Authorization header + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + Assert.Empty(await response.Content.ReadAsStringAsync()); + Assert.Empty(await harness.PublishedAsync()); + + // password does not match what is on record + request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump5"))); + response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + Assert.Empty(await response.Content.ReadAsStringAsync()); + Assert.Empty(handler.Calls); + Assert.Empty(await harness.PublishedAsync()); + }); + } + + [Fact] + public async Task Returns_BadRequest_NoBody() + { + await TestAsync(async (harness, client, handler) => + { + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump"))); + request.Content = new StringContent("", Encoding.UTF8, "application/json"); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + Assert.Empty(await response.Content.ReadAsStringAsync()); + Assert.Empty(handler.Calls); + Assert.Empty(await harness.PublishedAsync()); + }); + } + + [Fact] + public async Task Returns_BadRequest_MissingValues() + { + await TestAsync(async (harness, client, handler) => + { + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump"))); + request.Content = new StringContent("{}", Encoding.UTF8, "application/json"); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + var body = await response.Content.ReadAsStringAsync(); + Assert.Contains("\"type\":\"https://tools.ietf.org/html/rfc7231#section-6.5.1\"", body); + Assert.Contains("\"title\":\"One or more validation errors occurred.\"", body); + Assert.Contains("\"status\":400", body); + Assert.Contains("\"SubscriptionId\":[\"The SubscriptionId field is required.\"]", body); + Assert.Contains("\"EventType\":[\"The EventType field is required.\"]", body); + Assert.Contains("\"Resource\":[\"The Resource field is required.\"]", body); + Assert.Empty(handler.Calls); + Assert.Empty(await harness.PublishedAsync()); + }); + } + + [Fact] + public async Task Returns_UnsupportedMediaType() + { + await TestAsync(async (harness, client, handler) => + { + var stream = TestSamples.GetAzureDevOpsPullRequestUpdated1(); + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump"))); + request.Content = new StreamContent(stream); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.UnsupportedMediaType, response.StatusCode); + 
Assert.Empty(await response.Content.ReadAsStringAsync()); + Assert.Empty(handler.Calls); + Assert.Empty(await harness.PublishedAsync()); + }); + } + + [Fact] + public async Task Returns_OK_CodePush() + { + await TestAsync(async (harness, client, handler) => + { + var stream = TestSamples.GetAzureDevOpsGitPush1(); + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump"))); + request.Content = new StreamContent(stream); + request.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json", "utf-8"); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Empty(await response.Content.ReadAsStringAsync()); + var call = Assert.Single(handler.Calls); + Assert.Equal("435e539d-3ce2-4283-8da9-8f3c0fe2e45e", call.SubscriptionId); + Assert.Equal(3, call.NotificationId); + Assert.Equal(AzureDevOpsEventType.GitPush, call.EventType); + + // Ensure the message was published + var context = Assert.IsType>(Assert.Single(await harness.PublishedAsync(TimeSpan.FromSeconds(1f)))); + var inner = context.Event; + Assert.NotNull(inner); + Assert.Null(inner.RepositoryId); + Assert.Equal("278d5cd2-584d-4b63-824a-2ba458937249", inner.RepositoryProviderId); + Assert.True(inner.Trigger); + }); + } + + [Fact] + public async Task Returns_OK_PullRequestUpdated() + { + await TestAsync(async (harness, client, handler) => + { + var stream = TestSamples.GetAzureDevOpsPullRequestUpdated1(); + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump"))); + request.Content = new StreamContent(stream); + request.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json", "utf-8"); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Empty(await response.Content.ReadAsStringAsync()); + var call = Assert.Single(handler.Calls); + Assert.Equal("435e539d-3ce2-4283-8da9-8f3c0fe2e45e", call.SubscriptionId); + Assert.Equal(3, call.NotificationId); + Assert.Equal(AzureDevOpsEventType.GitPullRequestUpdated, call.EventType); + Assert.Empty(await harness.PublishedAsync()); + }); + } + + [Fact] + public async Task Returns_OK_PullRequestMerged() + { + await TestAsync(async (harness, client, handler) => + { + var stream = TestSamples.GetAzureDevOpsPullRequestMerged1(); + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump"))); + request.Content = new StreamContent(stream); + request.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json", "utf-8"); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Empty(await response.Content.ReadAsStringAsync()); + var call = Assert.Single(handler.Calls); + Assert.Equal("435e539d-3ce2-4283-8da9-8f3c0fe2e45e", call.SubscriptionId); + Assert.Equal(3, call.NotificationId); + Assert.Equal(AzureDevOpsEventType.GitPullRequestMerged, call.EventType); + Assert.Empty(await 
harness.PublishedAsync()); + }); + } + + [Fact] + public async Task Returns_OK_PullRequestCommentEvent() + { + await TestAsync(async (harness, client, handler) => + { + var stream = TestSamples.GetAzureDevOpsPullRequestCommentEvent1(); + var request = new HttpRequestMessage(HttpMethod.Post, "/webhooks/azure"); + request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", Convert.ToBase64String(Encoding.ASCII.GetBytes("vsts:burp-bump"))); + request.Content = new StreamContent(stream); + request.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json", "utf-8"); + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Empty(await response.Content.ReadAsStringAsync()); + var call = Assert.Single(handler.Calls); + Assert.Equal("435e539d-3ce2-4283-8da9-8f3c0fe2e45e", call.SubscriptionId); + Assert.Equal(3, call.NotificationId); + Assert.Equal(AzureDevOpsEventType.GitPullRequestCommentEvent, call.EventType); + Assert.Empty(await harness.PublishedAsync()); + }); + } + + private async Task TestAsync(Func executeAndVerify) + { + // Arrange + var builder = new WebHostBuilder() + .ConfigureLogging(builder => builder.AddXUnit(outputHelper)) + .ConfigureAppConfiguration(builder => + { + builder.AddInMemoryCollection(new Dictionary + { + ["Authentication:Schemes:ServiceHooks:Credentials:vsts"] = "burp-bump", + }); + }) + .ConfigureServices((context, services) => + { + var dbName = Guid.NewGuid().ToString(); + var configuration = context.Configuration; + services.AddDbContext(options => + { + options.UseInMemoryDatabase(dbName, o => o.EnableNullChecks()); + options.EnableDetailedErrors(); + }); + services.AddRouting(); + services.AddNotificationsHandler(); + services.AddSingleton(); + + services.ConfigureHttpJsonOptions(options => + { + options.SerializerOptions.Converters.Add( + new Extensions.Json.JsonStringEnumMemberConverter( + namingPolicy: options.SerializerOptions.PropertyNamingPolicy, + allowIntegerValues: true)); + }); + + services.AddAuthentication() + .AddBasic(AuthConstants.SchemeNameServiceHooks, options => options.Realm = "Dependabot"); + + services.AddAuthorization(options => + { + options.AddPolicy(AuthConstants.PolicyNameServiceHooks, policy => + { + policy.AddAuthenticationSchemes(AuthConstants.SchemeNameServiceHooks) + .RequireAuthenticatedUser(); + }); + }); + + services.AddEventBus(builder => builder.AddInMemoryTransport().AddInMemoryTestHarness()); + }) + .Configure(app => + { + app.UseRouting(); + + app.UseAuthentication(); + app.UseAuthorization(); + app.UseEndpoints(endpoints => + { + endpoints.MapWebhooks(); + }); + }); + using var server = new TestServer(builder); + + using var scope = server.Services.CreateScope(); + var provider = scope.ServiceProvider; + + var context = provider.GetRequiredService(); + await context.Database.EnsureCreatedAsync(); + + var handler = Assert.IsType(provider.GetRequiredService()); + + var harness = provider.GetRequiredService(); + await harness.StartAsync(); + + try + { + var client = server.CreateClient(); + + await executeAndVerify(harness, client, handler); + + // Ensure there were no publish failures + Assert.Empty(await harness.FailedAsync()); + } + finally + { + await harness.StopAsync(); + } + } + + class ModifiedAzureDevOpsEventHandler : AzureDevOpsEventHandler + { + public ModifiedAzureDevOpsEventHandler(IEventPublisher publisher, IOptions jsonOptions, ILogger logger) + : base(publisher, jsonOptions, 
logger) { } + + public List Calls { get; } = new(); + + public override async Task HandleAsync(AzureDevOpsEvent model, CancellationToken cancellationToken) + { + Calls.Add(model); + await base.HandleAsync(model, cancellationToken); + } + } +} diff --git a/server/Tingle.Dependabot.Tests/Models/DependabotConfigurationTests.cs b/server/Tingle.Dependabot.Tests/Models/DependabotConfigurationTests.cs new file mode 100644 index 00000000..e349f69a --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Models/DependabotConfigurationTests.cs @@ -0,0 +1,42 @@ +using Tingle.Dependabot.Models; +using Xunit; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace Tingle.Dependabot.Tests.Models; + +public class DependabotConfigurationTests +{ + [Fact] + public void Deserialization_Works() + { + using var stream = TestSamples.GetDependabot(); + using var reader = new StreamReader(stream); + + var deserializer = new DeserializerBuilder().WithNamingConvention(HyphenatedNamingConvention.Instance) + .IgnoreUnmatchedProperties() + .Build(); + + var configuration = deserializer.Deserialize(reader); + Assert.NotNull(configuration); + Assert.Equal(2, configuration!.Version); + Assert.NotNull(configuration.Updates!); + Assert.Equal(2, configuration.Updates!.Count); + + var first = configuration.Updates[0]; + Assert.Equal("/", first.Directory); + Assert.Equal(DependabotPackageEcosystem.Docker, first.PackageEcosystem); + Assert.Equal(DependabotScheduleInterval.Weekly, first.Schedule?.Interval); + Assert.Equal(new TimeOnly(3, 0), first.Schedule?.Time); + Assert.Equal(DependabotScheduleDay.Sunday, first.Schedule?.Day); + Assert.Null(first.InsecureExternalCodeExecution); + + var second = configuration.Updates[1]; + Assert.Equal("/client", second.Directory); + Assert.Equal(DependabotPackageEcosystem.Npm, second.PackageEcosystem); + Assert.Equal(DependabotScheduleInterval.Daily, second.Schedule?.Interval); + Assert.Equal(new TimeOnly(3, 15), second.Schedule?.Time); + Assert.Equal(DependabotScheduleDay.Monday, second.Schedule?.Day); + Assert.Equal(DependabotInsecureExternalCodeExecution.Deny, second.InsecureExternalCodeExecution); + } +} diff --git a/server/Tingle.Dependabot.Tests/Models/DependabotUpdateScheduleTests.cs b/server/Tingle.Dependabot.Tests/Models/DependabotUpdateScheduleTests.cs new file mode 100644 index 00000000..a0d855f1 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Models/DependabotUpdateScheduleTests.cs @@ -0,0 +1,22 @@ +using Tingle.Dependabot.Models; +using Xunit; + +namespace Tingle.Dependabot.Tests.Models; + +public class DependabotUpdateScheduleTests +{ + [Theory] + [InlineData(DependabotScheduleInterval.Daily, null, null, "00 02 * * 1-5")] // default to 02:00 + [InlineData(DependabotScheduleInterval.Daily, "23:30", DependabotScheduleDay.Saturday, "30 23 * * 1-5")] // ignores day + [InlineData(DependabotScheduleInterval.Weekly, "10:00", DependabotScheduleDay.Saturday, "00 10 * * 6")] + [InlineData(DependabotScheduleInterval.Weekly, "15:00", null, "00 15 * * 1")] // defaults to Mondays + [InlineData(DependabotScheduleInterval.Monthly, "17:30", DependabotScheduleDay.Saturday, "30 17 1 * *")] // ignores day + public void GenerateCronSchedule_Works(DependabotScheduleInterval interval, string time, DependabotScheduleDay? 
day, string expected) + { + var schedule = new DependabotUpdateSchedule { Interval = interval, }; + if (time != null) schedule.Time = TimeOnly.Parse(time); + if (day != null) schedule.Day = day; + var actual = schedule.GenerateCron(); + Assert.Equal(expected, actual); + } +} diff --git a/server/Tingle.Dependabot.Tests/Models/UpdateJobResourcesTests.cs b/server/Tingle.Dependabot.Tests/Models/UpdateJobResourcesTests.cs new file mode 100644 index 00000000..6057b0f6 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Models/UpdateJobResourcesTests.cs @@ -0,0 +1,38 @@ +using Tingle.Dependabot.Models; +using Xunit; + +namespace Tingle.Dependabot.Tests.Models; + +public class UpdateJobResourcesTests +{ + [Fact] + public void FromEcosystem_Works() + { + var values = Enum.GetValues(); + Assert.All(values, ecosystem => UpdateJobResources.FromEcosystem(ecosystem)); + } + + [Theory] + [InlineData(DependabotPackageEcosystem.Bundler, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Cargo, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Composer, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Docker, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Elixir, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Elm, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.GitSubmodule, 0.1, 0.2)] + [InlineData(DependabotPackageEcosystem.GithubActions, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.GoModules, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Gradle, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Maven, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Mix, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Npm, 0.25, 1.0)] + [InlineData(DependabotPackageEcosystem.NuGet, 0.25, 0.2)] + [InlineData(DependabotPackageEcosystem.Pip, 0.25, 0.5)] + [InlineData(DependabotPackageEcosystem.Terraform, 0.25, 1.0)] + public void FromEcosystem_ExpectedValues(DependabotPackageEcosystem ecosystem, double expectedCpu, double expectedMemory) + { + var resources = UpdateJobResources.FromEcosystem(ecosystem); + Assert.Equal(expectedCpu, resources.Cpu); + Assert.Equal(expectedMemory, resources.Memory); + } +} diff --git a/server/Tingle.Dependabot.Tests/Samples/dependabot.yml b/server/Tingle.Dependabot.Tests/Samples/dependabot.yml new file mode 100644 index 00000000..02411b5a --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/dependabot.yml @@ -0,0 +1,39 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: 'docker' # See documentation for possible values + directory: '/' # Location of package manifests + schedule: + interval: 'weekly' + time: '03:00' + day: 'sunday' + open-pull-requests-limit: 10 + - package-ecosystem: 'npm' # See documentation for possible values + directory: '/client' # Location of package manifests + schedule: + interval: 'daily' + time: '03:15' + open-pull-requests-limit: 10 + insecure-external-code-execution: 'deny' + ignore: + - dependency-name: 'react' + update-types: ['version-update:semver-major'] + - dependency-name: 'react-dom' + update-types: ['version-update:semver-major'] + - dependency-name: '@types/react' + update-types: ['version-update:semver-major'] + - dependency-name: '@types/react-dom' + update-types: ['version-update:semver-major'] +registries: + tingle: + type: nuget-feed + url: 'https://pkgs.dev.azure.com/dependabot/_packaging/dependabot/nuget/v3/index.json' + token: ':${{DEFAULT_TOKEN}}' + tingle-npm: + type: npm-registry + url: 'https://pkgs.dev.azure.com/dependabot/_packaging/dependabot-npm/npm/registry/' + token: 'tingle-npm:${{DEFAULT_TOKEN}}' diff --git a/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-1.json b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-1.json new file mode 100644 index 00000000..d939acaf --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-1.json @@ -0,0 +1,131 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "af07be1b-f3ad-44c8-a7f1-c4835f2df06b", + "eventType": "ms.vss-code.git-pullrequest-comment-event", + "publisherId": "tfs", + "message": { + "text": "Jamal Hartnett has edited a pull request comment", + "html": "Jamal Hartnett has edited a pull request comment", + "markdown": "Jamal Hartnett has [edited](https://fabrikam.visualstudio.com/DefaultCollection/_git/Fabrikam/pullrequest/1?discussionId=5) a pull request comment" + }, + "detailedMessage": { + "text": "Jamal Hartnett has edited a pull request comment\r\nThis is my comment.\r\n", + "html": "Jamal Hartnett has edited a pull request comment
<br/>This is my comment.<br/>
", + "markdown": "Jamal Hartnett has [edited](https://fabrikam.visualstudio.com/DefaultCollection/_git/Fabrikam/pullrequest/1?discussionId=5) a pull request comment\r\nThis is my comment.\r\n" + }, + "resource": { + "comment": { + "id": 2, + "parentCommentId": 1, + "author": { + "displayName": "Jamal Hartnett", + "url": "https://fabrikam.vssps.visualstudio.com/_apis/Identities/54d125f7-69f7-4191-904f-c5b96b6261c8", + "id": "54d125f7-69f7-4191-904f-c5b96b6261c8", + "uniqueName": "fabrikamfiber4@hotmail.com", + "imageUrl": "https://fabrikam.visualstudio.com/DefaultCollection/_api/_common/identityImage?id=54d125f7-69f7-4191-904f-c5b96b6261c8" + }, + "content": "This is my comment.", + "publishedDate": "2014-06-17T16:55:46.589889Z", + "lastUpdatedDate": "2014-06-17T16:58:33.123889Z", + "lastContentUpdatedDate": "2014-06-17T16:58:33.123889Z", + "commentType": "text", + "_links": { + "self": { + "href": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/pullRequests/1/threads/5/comments/2" + }, + "repository": { + "href": "http://joscol2/DefaultCollection/ebed510c-62eb-474b-965f-fd151ebb82e4/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079" + }, + "threads": { + "href": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/pullRequests/1/threads/5" + } + } + }, + "pullRequest": { + "repository": { + "id": "4bc14d40-c903-45e2-872e-0462c7748079", + "name": "Fabrikam", + "url": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079", + "project": { + "id": "6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "name": "Fabrikam", + "url": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/projects/6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "state": "wellFormed", + "visibility": "unchanged", + "lastUpdateTime": "0001-01-01T00:00:00" + }, + "defaultBranch": "refs/heads/master", + "remoteUrl": "https://fabrikam.visualstudio.com/DefaultCollection/_git/Fabrikam" + }, + "pullRequestId": 1, + "status": "active", + "createdBy": { + "displayName": "Jamal Hartnett", + "url": "https://fabrikam.vssps.visualstudio.com/_apis/Identities/54d125f7-69f7-4191-904f-c5b96b6261c8", + "id": "54d125f7-69f7-4191-904f-c5b96b6261c8", + "uniqueName": "fabrikamfiber4@hotmail.com", + "imageUrl": "https://fabrikam.visualstudio.com/DefaultCollection/_api/_common/identityImage?id=54d125f7-69f7-4191-904f-c5b96b6261c8" + }, + "creationDate": "2014-06-17T16:55:46.589889Z", + "title": "my first pull request", + "description": " - test2\r\n", + "sourceRefName": "refs/heads/mytopic", + "targetRefName": "refs/heads/master", + "mergeStatus": "succeeded", + "mergeId": "a10bb228-6ba6-4362-abd7-49ea21333dbd", + "lastMergeSourceCommit": { + "commitId": "53d54ac915144006c2c9e90d2c7d3880920db49c", + "url": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/53d54ac915144006c2c9e90d2c7d3880920db49c" + }, + "lastMergeTargetCommit": { + "commitId": "a511f535b1ea495ee0c903badb68fbc83772c882", + "url": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/a511f535b1ea495ee0c903badb68fbc83772c882" + }, + "lastMergeCommit": { + "commitId": "eef717f69257a6333f221566c1c987dc94cc0d72", + "url": 
"https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/eef717f69257a6333f221566c1c987dc94cc0d72" + }, + "reviewers": [ + { + "reviewerUrl": null, + "vote": 0, + "displayName": "[Mobile]\\Mobile Team", + "url": "https://fabrikam.vssps.visualstudio.com/_apis/Identities/2ea2d095-48f9-4cd6-9966-62f6f574096c", + "id": "2ea2d095-48f9-4cd6-9966-62f6f574096c", + "uniqueName": "vstfs:///Classification/TeamProject/f0811a3b-8c8a-4e43-a3bf-9a049b4835bd\\Mobile Team", + "imageUrl": "https://fabrikam.visualstudio.com/DefaultCollection/_api/_common/identityImage?id=2ea2d095-48f9-4cd6-9966-62f6f574096c", + "isContainer": true + } + ], + "commits": [ + { + "commitId": "53d54ac915144006c2c9e90d2c7d3880920db49c", + "url": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/53d54ac915144006c2c9e90d2c7d3880920db49c" + } + ], + "url": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/pullRequests/1", + "_links": { + "web": { + "href": "https://fabrikam.visualstudio.com/DefaultCollection/_git/Fabrikam/pullrequest/1#view=discussion" + }, + "statuses": { + "href": "https://fabrikam.visualstudio.com/DefaultCollection/_apis/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/pullRequests/1/statuses" + } + } + } + }, + "resourceVersion": "2.0", + "resourceContainers": { + "collection": { + "id": "c12d0eb8-e382-443b-9f9c-c52cba5014c2" + }, + "account": { + "id": "f844ec47-a9db-4511-8281-8b63f4eaf94e" + }, + "project": { + "id": "be9b3917-87e6-42a4-a549-2bc06a7a878f" + } + }, + "createdDate": "2023-01-21T12:42:13.182Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-2.json b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-2.json new file mode 100644 index 00000000..a1e56378 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-2.json @@ -0,0 +1,186 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "1e869c69-418c-4ef6-b2f1-ee95fcad149f", + "eventType": "ms.vss-code.git-pullrequest-comment-event", + "publisherId": "tfs", + "message": null, + "detailedMessage": null, + "resource": { + "comment": { + "id": 1, + "parentCommentId": 0, + "author": { + "displayName": "dependabot", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/961314fa-c312-68ab-8dce-cbb71e30c268", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + } + }, + "id": "961314fa-c312-68ab-8dce-cbb71e30c268", + "uniqueName": "dependabot@tingle.software", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=961314fa-c312-68ab-8dce-cbb71e30c268", + "descriptor": "aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + }, + "content": "Deployment to your Review App succeeded.", + "publishedDate": "2023-01-21T13:54:51.827Z", + "lastUpdatedDate": "2023-01-21T13:54:51.827Z", + "lastContentUpdatedDate": "2023-01-21T13:54:51.827Z", + "commentType": "text", + "usersLiked": [], + "_links": { + "self": { + "href": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/threads/109903/comments/1" + }, + "repository": { + "href": 
"https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c" + }, + "threads": { + "href": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/threads/109903" + }, + "pullRequests": { + "href": "https://dev.azure.com/dependabot/_apis/git/pullRequests/22568" + } + } + }, + "pullRequest": { + "repository": { + "id": "d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "name": "dependabot-sample", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "name": "Core", + "description": "All engineering happens here", + "url": "https://dev.azure.com/dependabot/_apis/projects/cea8cb01-dd13-4588-b27a-55fa170e4e94", + "state": "wellFormed", + "revision": 770, + "visibility": "private", + "lastUpdateTime": "2020-07-15T14:09:49.98Z" + }, + "size": 3568380, + "remoteUrl": "https://dependabot@dev.azure.com/dependabot/Core/_git/dependabot-sample", + "sshUrl": "git@ssh.dev.azure.com:v3/dependabot/Core/dependabot-sample", + "webUrl": "https://dev.azure.com/dependabot/Core/_git/dependabot-sample", + "isDisabled": false, + "isInMaintenance": false + }, + "pullRequestId": 22568, + "codeReviewId": 23493, + "status": "completed", + "createdBy": { + "displayName": "dependabot", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/961314fa-c312-68ab-8dce-cbb71e30c268", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + } + }, + "id": "961314fa-c312-68ab-8dce-cbb71e30c268", + "uniqueName": "dependabot@tingle.software", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=961314fa-c312-68ab-8dce-cbb71e30c268", + "descriptor": "aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + }, + "creationDate": "2023-01-18T02:05:34.3260021Z", + "closedDate": "2023-01-18T04:03:21.203845Z", + "title": "Bump Tingle.EventBus.Transports.InMemory from 0.17.2-ci0016 to 0.18.0", + "description": "Bumps [Tingle.EventBus.Transports.InMemory](https://github.com/tinglesoftware/eventbus) from 0.17.2-ci0016 to 0.18.0.\n

#Release notes\n\nSourced from Tingle.EventBus.Transports.InMemory's releases.\n\n0.18.0\n\nHappy New Year and a shiny new version with lots of new stuff since last year.\n\nNEW\n\n- #487: Support the use of IConfiguration to configure transports, events, and consumers. This makes it easier to manage different values for different environments or transports.\n- #488: Wait for transport to be started can now be configured per transport.\n- #491: Support for consuming dead-lettered events via IDeadLetteredEventConsumer<TEvent> and DeadLetteredEventContext<TEvent>.\n- #489: Duplicate detection settings can now be configured per event with defaults on the transport and the entire bus.\n\nFixes\n\n- #486: Include transport name in logs to disambiguate them.\n- #490: Prevent transport options configuration from being called multiple times.\n- #485: Expose extensions on EventData for easier working with Azure IoT Hub.\n\nFull Changelog: https://github.com/tinglesoftware/eventbus/compare/0.17.1...0.18.0\n\n#Commits
\n\n", + "sourceRefName": "refs/heads/dependabot/nuget/Tingle.EventBus.Transports.InMemory-0.18.0", + "targetRefName": "refs/heads/main", + "mergeStatus": "succeeded", + "isDraft": false, + "mergeId": "e8fdd34f-4785-43e1-8513-560c415dba45", + "lastMergeSourceCommit": { + "commitId": "6258a2efa589c277ed97e3246b6a59faf0e29859", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/6258a2efa589c277ed97e3246b6a59faf0e29859" + }, + "lastMergeTargetCommit": { + "commitId": "c02450ec2c61650a1251c8a8281a0637829b87da", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/c02450ec2c61650a1251c8a8281a0637829b87da" + }, + "lastMergeCommit": { + "commitId": "74f9ceb316ef52762eea289d63fbac6c8f2a224d", + "author": { + "name": "dependabot", + "email": "dependabot@tingle.software", + "date": "2023-01-18T04:03:20Z" + }, + "committer": { + "name": "dependabot", + "email": "dependabot@tingle.software", + "date": "2023-01-18T04:03:20Z" + }, + "comment": "Merge pull request 22568 from dependabot/nuget/Tingle.EventBus.Transports.InMemory-0.18.0 into main", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/74f9ceb316ef52762eea289d63fbac6c8f2a224d" + }, + "reviewers": [ + { + "reviewerUrl": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/reviewers/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "vote": 10, + "hasDeclined": false, + "isRequired": true, + "isFlagged": false, + "displayName": "Maxwell Weru", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.ZGRjNDViZTAtZjFmZS03MDQyLTg3YWMtZTZkYmJkYmMzYjQ1" + } + }, + "id": "ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "uniqueName": "mburumaxwell@tingle.software", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45" + } + ], + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568", + "_links": { + "web": { + "href": "https://dev.azure.com/dependabot/Core/_git/dependabot-sample/pullrequest/22568" + }, + "statuses": { + "href": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/statuses" + } + }, + "completionOptions": { + "deleteSourceBranch": true, + "squashMerge": true, + "mergeStrategy": "squash", + "triggeredByAutoComplete": true, + "autoCompleteIgnoreConfigIds": [] + }, + "supportsIterations": true, + "completionQueueTime": "2023-01-18T04:03:20.0206329Z", + "closedBy": { + "displayName": "dependabot", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/961314fa-c312-68ab-8dce-cbb71e30c268", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + } + }, + "id": "961314fa-c312-68ab-8dce-cbb71e30c268", + "uniqueName": "dependabot@tingle.software", + "imageUrl": 
"https://dev.azure.com/dependabot/_api/_common/identityImage?id=961314fa-c312-68ab-8dce-cbb71e30c268", + "descriptor": "aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + }, + "artifactId": "vstfs:///Git/PullRequestId/cea8cb01-dd13-4588-b27a-55fa170e4e94%2fd5bb1147-bd9f-4ae1-8554-aec3d164f94c%2f22568" + } + }, + "resourceVersion": "2.0", + "resourceContainers": { + "collection": { + "id": "3fba1bb9-6e8c-4087-b435-29157f94e9a1", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "account": { + "id": "1f76e76f-721c-4b92-8ff3-bd07abce2671", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "baseUrl": "https://dev.azure.com/dependabot/" + } + }, + "createdDate": "2023-01-21T13:54:58.3779564Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-1.json b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-1.json new file mode 100644 index 00000000..4e792462 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-1.json @@ -0,0 +1,88 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "6872ee8c-b333-4eff-bfb9-0d5274943566", + "eventType": "git.pullrequest.merged", + "publisherId": "tfs", + "scope": "all", + "message": { + "text": "Jamal Hartnett has created a pull request merge commit", + "html": "Jamal Hartnett has created a pull request merge commit", + "markdown": "Jamal Hartnett has created a pull request merge commit" + }, + "detailedMessage": { + "text": "Jamal Hartnett has created a pull request merge commit\r\n\r\n- Merge status: Succeeded\r\n- Merge commit: eef717(https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/eef717f69257a6333f221566c1c987dc94cc0d72)\r\n", + "html": "Jamal Hartnett has created a pull request merge commit\r\n
<ul>\r\n<li>Merge status: Succeeded</li>\r\n<li>Merge commit: eef717</li>\r\n</ul>\r\n
", + "markdown": "Jamal Hartnett has created a pull request merge commit\r\n\r\n+ Merge status: Succeeded\r\n+ Merge commit: [eef717](https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/eef717f69257a6333f221566c1c987dc94cc0d72)\r\n" + }, + "resource": { + "repository": { + "id": "4bc14d40-c903-45e2-872e-0462c7748079", + "name": "Fabrikam", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079", + "project": { + "id": "6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "name": "Fabrikam", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/projects/6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "state": "wellFormed" + }, + "defaultBranch": "refs/heads/master", + "remoteUrl": "https://dev.azure.com/fabrikam/DefaultCollection/_git/Fabrikam" + }, + "pullRequestId": 1, + "status": "completed", + "createdBy": { + "id": "54d125f7-69f7-4191-904f-c5b96b6261c8", + "displayName": "Jamal Hartnett", + "uniqueName": "fabrikamfiber4@hotmail.com", + "url": "https://vssps.dev.azure.com/fabrikam/_apis/Identities/54d125f7-69f7-4191-904f-c5b96b6261c8", + "imageUrl": "https://dev.azure.com/fabrikam/DefaultCollection/_api/_common/identityImage?id=54d125f7-69f7-4191-904f-c5b96b6261c8" + }, + "creationDate": "2014-06-17T16:55:46.589889Z", + "closedDate": "2014-06-30T18:59:12.3660573Z", + "title": "my first pull request", + "description": " - test2\r\n", + "sourceRefName": "refs/heads/mytopic", + "targetRefName": "refs/heads/master", + "mergeStatus": "succeeded", + "mergeId": "a10bb228-6ba6-4362-abd7-49ea21333dbd", + "lastMergeSourceCommit": { + "commitId": "53d54ac915144006c2c9e90d2c7d3880920db49c", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/53d54ac915144006c2c9e90d2c7d3880920db49c" + }, + "lastMergeTargetCommit": { + "commitId": "a511f535b1ea495ee0c903badb68fbc83772c882", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/a511f535b1ea495ee0c903badb68fbc83772c882" + }, + "lastMergeCommit": { + "commitId": "eef717f69257a6333f221566c1c987dc94cc0d72", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/eef717f69257a6333f221566c1c987dc94cc0d72" + }, + "reviewers": [ + { + "reviewerUrl": null, + "vote": 0, + "id": "2ea2d095-48f9-4cd6-9966-62f6f574096c", + "displayName": "[Mobile]\\Mobile Team", + "uniqueName": "vstfs:///Classification/TeamProject/f0811a3b-8c8a-4e43-a3bf-9a049b4835bd\\Mobile Team", + "url": "https://vssps.dev.azure.com/fabrikam/_apis/Identities/2ea2d095-48f9-4cd6-9966-62f6f574096c", + "imageUrl": "https://dev.azure.com/fabrikam/DefaultCollection/_api/_common/identityImage?id=2ea2d095-48f9-4cd6-9966-62f6f574096c", + "isContainer": true + } + ], + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/pullRequests/1" + }, + "resourceVersion": "1.0", + "resourceContainers": { + "collection": { + "id": "c12d0eb8-e382-443b-9f9c-c52cba5014c2" + }, + "account": { + "id": "f844ec47-a9db-4511-8281-8b63f4eaf94e" + }, + "project": { + "id": "be9b3917-87e6-42a4-a549-2bc06a7a878f" + } + }, + "createdDate": "2016-09-19T13:03:27.3156388Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-2.json 
b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-2.json new file mode 100644 index 00000000..10df2936 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-2.json @@ -0,0 +1,148 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "6872ee8c-b333-4eff-bfb9-0d5274943566", + "eventType": "git.pullrequest.merged", + "publisherId": "tfs", + "scope": "all", + "message": null, + "detailedMessage": null, + "resource": { + "repository": { + "id": "d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "name": "dependabot-sample", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "name": "Core", + "description": "All engineering happens here", + "url": "https://dev.azure.com/dependabot/_apis/projects/cea8cb01-dd13-4588-b27a-55fa170e4e94", + "state": "wellFormed", + "revision": 770, + "visibility": "private", + "lastUpdateTime": "2020-07-15T14:09:49.98Z" + }, + "size": 3568380, + "remoteUrl": "https://dependabot@dev.azure.com/dependabot/Core/_git/dependabot-sample", + "sshUrl": "git@ssh.dev.azure.com:v3/dependabot/Core/dependabot-sample", + "webUrl": "https://dev.azure.com/dependabot/Core/_git/dependabot-sample", + "isDisabled": false, + "isInMaintenance": false + }, + "pullRequestId": 22568, + "codeReviewId": 23493, + "status": "completed", + "createdBy": { + "displayName": "dependabot", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/961314fa-c312-68ab-8dce-cbb71e30c268", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + } + }, + "id": "961314fa-c312-68ab-8dce-cbb71e30c268", + "uniqueName": "dependabot@tingle.software", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=961314fa-c312-68ab-8dce-cbb71e30c268", + "descriptor": "aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + }, + "creationDate": "2023-01-18T02:05:34.3260021Z", + "closedDate": "2023-01-18T04:03:21.203845Z", + "title": "Bump Tingle.EventBus.Transports.InMemory from 0.17.2-ci0016 to 0.18.0", + "description": "Bumps [Tingle.EventBus.Transports.InMemory](https://github.com/tinglesoftware/eventbus) from 0.17.2-ci0016 to 0.18.0.\n

#Release notes\n\nSourced from Tingle.EventBus.Transports.InMemory's releases.\n\n0.18.0\n\nHappy New Year and a shiny new version with lots of new stuff since last year.\n\nNEW\n\n- #487: Support the use of IConfiguration to configure transports, events, and consumers. This makes it easier to manage different values for different environments or transports.\n- #488: Wait for transport to be started can now be configured per transport.\n- #491: Support for consuming dead-lettered events via IDeadLetteredEventConsumer<TEvent> and DeadLetteredEventContext<TEvent>.\n- #489: Duplicate detection settings can now be configured per event with defaults on the transport and the entire bus.\n\nFixes\n\n- #486: Include transport name in logs to disambiguate them.\n- #490: Prevent transport options configuration from being called multiple times.\n- #485: Expose extensions on EventData for easier working with Azure IoT Hub.\n\nFull Changelog: https://github.com/tinglesoftware/eventbus/compare/0.17.1...0.18.0\n\n#Commits
\n\n", + "sourceRefName": "refs/heads/dependabot/nuget/Tingle.EventBus.Transports.InMemory-0.18.0", + "targetRefName": "refs/heads/main", + "mergeStatus": "succeeded", + "isDraft": false, + "mergeId": "e8fdd34f-4785-43e1-8513-560c415dba45", + "lastMergeSourceCommit": { + "commitId": "6258a2efa589c277ed97e3246b6a59faf0e29859", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/6258a2efa589c277ed97e3246b6a59faf0e29859" + }, + "lastMergeTargetCommit": { + "commitId": "c02450ec2c61650a1251c8a8281a0637829b87da", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/c02450ec2c61650a1251c8a8281a0637829b87da" + }, + "lastMergeCommit": { + "commitId": "74f9ceb316ef52762eea289d63fbac6c8f2a224d", + "author": { + "name": "dependabot", + "email": "dependabot@tingle.software", + "date": "2023-01-18T04:03:20Z" + }, + "committer": { + "name": "dependabot", + "email": "dependabot@tingle.software", + "date": "2023-01-18T04:03:20Z" + }, + "comment": "Merge pull request 22568 from dependabot/nuget/Tingle.EventBus.Transports.InMemory-0.18.0 into main", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/74f9ceb316ef52762eea289d63fbac6c8f2a224d" + }, + "reviewers": [ + { + "reviewerUrl": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/reviewers/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "vote": 10, + "hasDeclined": false, + "isRequired": true, + "isFlagged": false, + "displayName": "Maxwell Weru", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.ZGRjNDViZTAtZjFmZS03MDQyLTg3YWMtZTZkYmJkYmMzYjQ1" + } + }, + "id": "ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "uniqueName": "mburumaxwell@tingle.software", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45" + } + ], + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568", + "_links": { + "web": { + "href": "https://dev.azure.com/dependabot/Core/_git/dependabot-sample/pullrequest/22568" + }, + "statuses": { + "href": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/statuses" + } + }, + "completionOptions": { + "deleteSourceBranch": true, + "squashMerge": true, + "mergeStrategy": "squash", + "triggeredByAutoComplete": true, + "autoCompleteIgnoreConfigIds": [] + }, + "supportsIterations": true, + "completionQueueTime": "2023-01-18T04:03:20.0206329Z", + "closedBy": { + "displayName": "dependabot", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/961314fa-c312-68ab-8dce-cbb71e30c268", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + } + }, + "id": "961314fa-c312-68ab-8dce-cbb71e30c268", + "uniqueName": "dependabot@tingle.software", + "imageUrl": 
"https://dev.azure.com/dependabot/_api/_common/identityImage?id=961314fa-c312-68ab-8dce-cbb71e30c268", + "descriptor": "aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + }, + "artifactId": "vstfs:///Git/PullRequestId/cea8cb01-dd13-4588-b27a-55fa170e4e94%2fd5bb1147-bd9f-4ae1-8554-aec3d164f94c%2f22568" + }, + "resourceVersion": "1.0", + "resourceContainers": { + "collection": { + "id": "3fba1bb9-6e8c-4087-b435-29157f94e9a1", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "account": { + "id": "1f76e76f-721c-4b92-8ff3-bd07abce2671", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "baseUrl": "https://dev.azure.com/dependabot/" + } + }, + "createdDate": "2023-01-18T04:03:28.114Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-1.json b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-1.json new file mode 100644 index 00000000..117ec135 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-1.json @@ -0,0 +1,94 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "af07be1b-f3ad-44c8-a7f1-c4835f2df06b", + "eventType": "git.pullrequest.updated", + "publisherId": "tfs", + "scope": "all", + "message": { + "text": "Jamal Hartnett marked the pull request as completed", + "html": "Jamal Hartnett marked the pull request as completed", + "markdown": "Jamal Hartnett marked the pull request as completed" + }, + "detailedMessage": { + "text": "Jamal Hartnett marked the pull request as completed\r\n\r\n- Merge status: Succeeded\r\n- Merge commit: eef717(https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/eef717f69257a6333f221566c1c987dc94cc0d72)\r\n", + "html": "Jamal Hartnett marked the pull request as completed\r\n
<ul>\r\n<li>Merge status: Succeeded</li>\r\n<li>Merge commit: eef717</li>\r\n</ul>\r\n
", + "markdown": "Jamal Hartnett marked the pull request as completed\r\n\r\n+ Merge status: Succeeded\r\n+ Merge commit: [eef717](https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/eef717f69257a6333f221566c1c987dc94cc0d72)\r\n" + }, + "resource": { + "repository": { + "id": "4bc14d40-c903-45e2-872e-0462c7748079", + "name": "Fabrikam", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079", + "project": { + "id": "6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "name": "Fabrikam", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/projects/6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "state": "wellFormed" + }, + "defaultBranch": "refs/heads/master", + "remoteUrl": "https://dev.azure.com/fabrikam/DefaultCollection/_git/Fabrikam" + }, + "pullRequestId": 1, + "status": "completed", + "createdBy": { + "id": "54d125f7-69f7-4191-904f-c5b96b6261c8", + "displayName": "Jamal Hartnett", + "uniqueName": "fabrikamfiber4@hotmail.com", + "url": "https://vssps.dev.azure.com/fabrikam/_apis/Identities/54d125f7-69f7-4191-904f-c5b96b6261c8", + "imageUrl": "https://dev.azure.com/fabrikam/DefaultCollection/_api/_common/identityImage?id=54d125f7-69f7-4191-904f-c5b96b6261c8" + }, + "creationDate": "2014-06-17T16:55:46.589889Z", + "closedDate": "2014-06-30T18:59:12.3660573Z", + "title": "my first pull request", + "description": " - test2\r\n", + "sourceRefName": "refs/heads/mytopic", + "targetRefName": "refs/heads/master", + "mergeStatus": "succeeded", + "mergeId": "a10bb228-6ba6-4362-abd7-49ea21333dbd", + "lastMergeSourceCommit": { + "commitId": "53d54ac915144006c2c9e90d2c7d3880920db49c", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/53d54ac915144006c2c9e90d2c7d3880920db49c" + }, + "lastMergeTargetCommit": { + "commitId": "a511f535b1ea495ee0c903badb68fbc83772c882", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/a511f535b1ea495ee0c903badb68fbc83772c882" + }, + "lastMergeCommit": { + "commitId": "eef717f69257a6333f221566c1c987dc94cc0d72", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/eef717f69257a6333f221566c1c987dc94cc0d72" + }, + "reviewers": [ + { + "reviewerUrl": null, + "vote": 0, + "id": "2ea2d095-48f9-4cd6-9966-62f6f574096c", + "displayName": "[Mobile]\\Mobile Team", + "uniqueName": "vstfs:///Classification/TeamProject/f0811a3b-8c8a-4e43-a3bf-9a049b4835bd\\Mobile Team", + "url": "https://vssps.dev.azure.com/fabrikam/_apis/Identities/2ea2d095-48f9-4cd6-9966-62f6f574096c", + "imageUrl": "https://dev.azure.com/fabrikam/DefaultCollection/_api/_common/identityImage?id=2ea2d095-48f9-4cd6-9966-62f6f574096c", + "isContainer": true + } + ], + "commits": [ + { + "commitId": "53d54ac915144006c2c9e90d2c7d3880920db49c", + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/commits/53d54ac915144006c2c9e90d2c7d3880920db49c" + } + ], + "url": "https://dev.azure.com/fabrikam/DefaultCollection/_apis/repos/git/repositories/4bc14d40-c903-45e2-872e-0462c7748079/pullRequests/1" + }, + "resourceVersion": "1.0", + "resourceContainers": { + "collection": { + "id": "c12d0eb8-e382-443b-9f9c-c52cba5014c2" + }, + "account": { + "id": 
"f844ec47-a9db-4511-8281-8b63f4eaf94e" + }, + "project": { + "id": "be9b3917-87e6-42a4-a549-2bc06a7a878f" + } + }, + "createdDate": "2016-09-19T13:03:27.2813828Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-2.json b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-2.json new file mode 100644 index 00000000..0c65de38 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-2.json @@ -0,0 +1,147 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "43236d01-b085-4739-80a6-153d305a902b", + "eventType": "git.pullrequest.updated", + "publisherId": "tfs", + "message": null, + "detailedMessage": null, + "resource": { + "repository": { + "id": "d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "name": "dependabot-sample", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "name": "Core", + "description": "All engineering happens here", + "url": "https://dev.azure.com/dependabot/_apis/projects/cea8cb01-dd13-4588-b27a-55fa170e4e94", + "state": "wellFormed", + "revision": 770, + "visibility": "private", + "lastUpdateTime": "2020-07-15T14:09:49.98Z" + }, + "size": 3568380, + "remoteUrl": "https://dependabot@dev.azure.com/dependabot/Core/_git/dependabot-sample", + "sshUrl": "git@ssh.dev.azure.com:v3/dependabot/Core/dependabot-sample", + "webUrl": "https://dev.azure.com/dependabot/Core/_git/dependabot-sample", + "isDisabled": false, + "isInMaintenance": false + }, + "pullRequestId": 22568, + "codeReviewId": 23493, + "status": "completed", + "createdBy": { + "displayName": "dependabot", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/961314fa-c312-68ab-8dce-cbb71e30c268", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + } + }, + "id": "961314fa-c312-68ab-8dce-cbb71e30c268", + "uniqueName": "dependabot@tingle.software", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=961314fa-c312-68ab-8dce-cbb71e30c268", + "descriptor": "aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + }, + "creationDate": "2023-01-18T02:05:34.3260021Z", + "closedDate": "2023-01-18T04:03:21.203845Z", + "title": "Bump Tingle.EventBus.Transports.InMemory from 0.17.2-ci0016 to 0.18.0", + "description": "Bumps [Tingle.EventBus.Transports.InMemory](https://github.com/tinglesoftware/eventbus) from 0.17.2-ci0016 to 0.18.0.\n

#Release notes\n\nSourced from Tingle.EventBus.Transports.InMemory's releases.\n\n0.18.0\n\nHappy New Year and a shiny new version with lots of new stuff since last year.\n\nNEW\n\n- #487: Support the use of IConfiguration to configure transports, events, and consumers. This makes it easier to manage different values for different environments or transports.\n- #488: Wait for transport to be started can now be configured per transport.\n- #491: Support for consuming dead-lettered events via IDeadLetteredEventConsumer<TEvent> and DeadLetteredEventContext<TEvent>.\n- #489: Duplicate detection settings can now be configured per event with defaults on the transport and the entire bus.\n\nFixes\n\n- #486: Include transport name in logs to disambiguate them.\n- #490: Prevent transport options configuration from being called multiple times.\n- #485: Expose extensions on EventData for easier working with Azure IoT Hub.\n\nFull Changelog: https://github.com/tinglesoftware/eventbus/compare/0.17.1...0.18.0\n\n#Commits
\n\n", + "sourceRefName": "refs/heads/dependabot/nuget/Tingle.EventBus.Transports.InMemory-0.18.0", + "targetRefName": "refs/heads/main", + "mergeStatus": "succeeded", + "isDraft": false, + "mergeId": "e8fdd34f-4785-43e1-8513-560c415dba45", + "lastMergeSourceCommit": { + "commitId": "6258a2efa589c277ed97e3246b6a59faf0e29859", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/6258a2efa589c277ed97e3246b6a59faf0e29859" + }, + "lastMergeTargetCommit": { + "commitId": "c02450ec2c61650a1251c8a8281a0637829b87da", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/c02450ec2c61650a1251c8a8281a0637829b87da" + }, + "lastMergeCommit": { + "commitId": "74f9ceb316ef52762eea289d63fbac6c8f2a224d", + "author": { + "name": "dependabot", + "email": "dependabot@tingle.software", + "date": "2023-01-18T04:03:20Z" + }, + "committer": { + "name": "dependabot", + "email": "dependabot@tingle.software", + "date": "2023-01-18T04:03:20Z" + }, + "comment": "Merge pull request 22568 from dependabot/nuget/Tingle.EventBus.Transports.InMemory-0.18.0 into main", + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/74f9ceb316ef52762eea289d63fbac6c8f2a224d" + }, + "reviewers": [ + { + "reviewerUrl": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/reviewers/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "vote": 10, + "hasDeclined": false, + "isRequired": true, + "isFlagged": false, + "displayName": "Maxwell Weru", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.ZGRjNDViZTAtZjFmZS03MDQyLTg3YWMtZTZkYmJkYmMzYjQ1" + } + }, + "id": "ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "uniqueName": "mburumaxwell@tingle.software", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45" + } + ], + "url": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568", + "_links": { + "web": { + "href": "https://dev.azure.com/dependabot/Core/_git/dependabot-sample/pullrequest/22568" + }, + "statuses": { + "href": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pullRequests/22568/statuses" + } + }, + "completionOptions": { + "deleteSourceBranch": true, + "squashMerge": true, + "mergeStrategy": "squash", + "triggeredByAutoComplete": true, + "autoCompleteIgnoreConfigIds": [] + }, + "supportsIterations": true, + "completionQueueTime": "2023-01-18T04:03:20.0206329Z", + "closedBy": { + "displayName": "dependabot", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/961314fa-c312-68ab-8dce-cbb71e30c268", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + } + }, + "id": "961314fa-c312-68ab-8dce-cbb71e30c268", + "uniqueName": "dependabot@tingle.software", + "imageUrl": 
"https://dev.azure.com/dependabot/_api/_common/identityImage?id=961314fa-c312-68ab-8dce-cbb71e30c268", + "descriptor": "aad.OTYxMzE0ZmEtYzMxMi03OGFiLThkY2UtY2JiNzFlMzBjMjY4" + }, + "artifactId": "vstfs:///Git/PullRequestId/cea8cb01-dd13-4588-b27a-55fa170e4e94%2fd5bb1147-bd9f-4ae1-8554-aec3d164f94c%2f22568" + }, + "resourceVersion": "1.0", + "resourceContainers": { + "collection": { + "id": "3fba1bb9-6e8c-4087-b435-29157f94e9a1", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "account": { + "id": "1f76e76f-721c-4b92-8ff3-bd07abce2671", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "baseUrl": "https://dev.azure.com/dependabot/" + } + }, + "createdDate": "2023-01-18T04:03:28.114Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git.push-1.json b/server/Tingle.Dependabot.Tests/Samples/git.push-1.json new file mode 100644 index 00000000..b0d362d5 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git.push-1.json @@ -0,0 +1,78 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "03c164c2-8912-4d5e-8009-3707d5f83734", + "eventType": "git.push", + "publisherId": "tfs", + "scope": "all", + "message": { + "text": "Jamal Hartnett pushed updates to branch master of repository Fabrikam-Fiber-Git.", + "html": "Jamal Hartnett pushed updates to branch master of repository Fabrikam-Fiber-Git.", + "markdown": "Jamal Hartnett pushed updates to branch `master` of repository `Fabrikam-Fiber-Git`." + }, + "detailedMessage": { + "text": "Jamal Hartnett pushed 1 commit to branch master of repository Fabrikam-Fiber-Git.\n - Fixed bug in web.config file 33b55f7c", + "html": "Jamal Hartnett pushed 1 commit to branch master of repository Fabrikam-Fiber-Git.\n
<ul>\n<li>Fixed bug in web.config file 33b55f7c</li>\n</ul>
", + "markdown": "Jamal Hartnett pushed 1 commit to branch [master](https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_git/Fabrikam-Fiber-Git/#version=GBmaster) of repository [Fabrikam-Fiber-Git](https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_git/Fabrikam-Fiber-Git/).\n* Fixed bug in web.config file [33b55f7c](https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_git/Fabrikam-Fiber-Git/commit/33b55f7cb7e7e245323987634f960cf4a6e6bc74)" + }, + "resource": { + "commits": [ + { + "commitId": "33b55f7cb7e7e245323987634f960cf4a6e6bc74", + "author": { + "name": "Jamal Hartnett", + "email": "fabrikamfiber4@hotmail.com", + "date": "2015-02-25T19:01:00Z" + }, + "committer": { + "name": "Jamal Hartnett", + "email": "fabrikamfiber4@hotmail.com", + "date": "2015-02-25T19:01:00Z" + }, + "comment": "Fixed bug in web.config file", + "url": "https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_git/Fabrikam-Fiber-Git/commit/33b55f7cb7e7e245323987634f960cf4a6e6bc74" + } + ], + "refUpdates": [ + { + "name": "refs/heads/master", + "oldObjectId": "aad331d8d3b131fa9ae03cf5e53965b51942618a", + "newObjectId": "33b55f7cb7e7e245323987634f960cf4a6e6bc74" + } + ], + "repository": { + "id": "278d5cd2-584d-4b63-824a-2ba458937249", + "name": "Fabrikam-Fiber-Git", + "url": "https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_apis/repos/git/repositories/278d5cd2-584d-4b63-824a-2ba458937249", + "project": { + "id": "6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "name": "Fabrikam-Fiber-Git", + "url": "https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_apis/projects/6ce954b1-ce1f-45d1-b94d-e6bf2464ba2c", + "state": "wellFormed" + }, + "defaultBranch": "refs/heads/master", + "remoteUrl": "https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_git/Fabrikam-Fiber-Git" + }, + "pushedBy": { + "id": "00067FFED5C7AF52@Live.com", + "displayName": "Jamal Hartnett", + "uniqueName": "Windows Live ID\\fabrikamfiber4@hotmail.com" + }, + "pushId": 14, + "date": "2014-05-02T19:17:13.3309587Z", + "url": "https://dev.azure.com/fabrikam-fiber-inc/DefaultCollection/_apis/repos/git/repositories/278d5cd2-584d-4b63-824a-2ba458937249/pushes/14" + }, + "resourceVersion": "1.0", + "resourceContainers": { + "collection": { + "id": "c12d0eb8-e382-443b-9f9c-c52cba5014c2" + }, + "account": { + "id": "f844ec47-a9db-4511-8281-8b63f4eaf94e" + }, + "project": { + "id": "be9b3917-87e6-42a4-a549-2bc06a7a878f" + } + }, + "createdDate": "2016-09-19T13:03:27.0379153Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git.push-2.json b/server/Tingle.Dependabot.Tests/Samples/git.push-2.json new file mode 100644 index 00000000..f847f2f5 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/git.push-2.json @@ -0,0 +1,99 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "799c2a17-84d0-4bf8-ac8c-53493eebec3a", + "eventType": "git.push", + "publisherId": "tfs", + "message": null, + "detailedMessage": null, + "resource": { + "commits": [ + { + "commitId": "aaf58235f187b26d98a1ad66b6d094501f311afe", + "author": { + "name": "dependabot[bot]", + "email": "noreply@github.com", + "date": "2020-12-23T02:04:46Z" + }, + "committer": { + "name": "Maxwell Weru", + "email": "example@contoso.com", + "date": "2020-12-23T02:04:46Z" + }, + "comment": "build(deps-dev): bump webpack-merge from 4.2.2 to 5.7.3\n\nBumps [webpack-merge](https://github.com/survivejs/webpack-merge) from 4.2.2 to 5.7.3.\n- [Release 
notes](https://github.com/survivejs/webpack-merge/releases)\n- [Changelog](https://github.com/survivejs/webpack-merge/blob/develop/CHANGELOG.md)\n- [Commits](https://github.com/survivejs/webpack-merge/compare/v4.2.2...v5.7.3)", + "url": "https://dev.azure.com/dependabot/_apis/git/repositories/e502622f-ac2b-4635-bec7-5e43d8865f71/commits/aaf58235f187b26d98a1ad66b6d094501f311afe" + } + ], + "refUpdates": [ + { + "name": "refs/heads/dependabot/npm_and_yarn/webpack-merge-5.7.3", + "oldObjectId": "0000000000000000000000000000000000000000", + "newObjectId": "aaf58235f187b26d98a1ad66b6d094501f311afe" + } + ], + "repository": { + "id": "e502622f-ac2b-4635-bec7-5e43d8865f71", + "name": "tingle", + "url": "https://dev.azure.com/dependabot/_apis/git/repositories/e502622f-ac2b-4635-bec7-5e43d8865f71", + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "name": "Core", + "url": "https://dev.azure.com/dependabot/_apis/projects/cea8cb01-dd13-4588-b27a-55fa170e4e94", + "state": "wellFormed", + "visibility": "unchanged", + "lastUpdateTime": "0001-01-01T00:00:00" + }, + "defaultBranch": "refs/heads/main", + "remoteUrl": "https://dev.azure.com/dependabot/Core/_git/dependabot" + }, + "pushedBy": { + "displayName": "Maxwell Weru", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.ZGRjNDViZTAtZjFmZS03MDQyLTg3YWMtZTZkYmJkYmMzYjQ1" + } + }, + "id": "ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "uniqueName": "example@contoso.com", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "descriptor": "aad.ZGRjNDViZTAtZjFmZS03MDQyLTg3YWMtZTZkYmJkYmMzYjQ1" + }, + "pushId": 34770, + "date": "2020-12-23T02:04:47.3801166Z", + "url": "https://dev.azure.com/dependabot/_apis/git/repositories/e502622f-ac2b-4635-bec7-5e43d8865f71/pushes/34770", + "_links": { + "self": { + "href": "https://dev.azure.com/dependabot/_apis/git/repositories/e502622f-ac2b-4635-bec7-5e43d8865f71/pushes/34770" + }, + "repository": { + "href": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/e502622f-ac2b-4635-bec7-5e43d8865f71" + }, + "commits": { + "href": "https://dev.azure.com/dependabot/_apis/git/repositories/e502622f-ac2b-4635-bec7-5e43d8865f71/pushes/34770/commits" + }, + "pusher": { + "href": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45" + }, + "refs": { + "href": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/e502622f-ac2b-4635-bec7-5e43d8865f71/refs/heads/dependabot/npm_and_yarn/webpack-merge-5.7.3" + } + } + }, + "resourceVersion": "1.0", + "resourceContainers": { + "collection": { + "id": "3fba1bb9-6e8c-4087-b435-29157f94e9a1", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "account": { + "id": "1f76e76f-721c-4b92-8ff3-bd07abce2671", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "baseUrl": "https://dev.azure.com/dependabot/" + } + }, + "createdDate": "2020-12-23T02:04:55.1406608Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/git.push-3.json b/server/Tingle.Dependabot.Tests/Samples/git.push-3.json new file mode 100644 index 00000000..ea2804a4 --- /dev/null +++ 
b/server/Tingle.Dependabot.Tests/Samples/git.push-3.json @@ -0,0 +1,114 @@ +{ + "subscriptionId": "435e539d-3ce2-4283-8da9-8f3c0fe2e45e", + "notificationId": 3, + "id": "56e81d32-b0e8-44e2-a92a-55eb7b6ccdce", + "eventType": "git.push", + "publisherId": "tfs", + "message": null, + "detailedMessage": null, + "resource": { + "commits": [ + { + "commitId": "b8a410b1b75ecb203fb5dda54adce2f9d2c87a27", + "author": { + "name": "Maxwell Weru", + "email": "mburumaxwell@gmail.com", + "date": "2020-12-23T05:46:01Z" + }, + "committer": { + "name": "Maxwell Weru", + "email": "mburumaxwell@gmail.com", + "date": "2020-12-23T05:46:01Z" + }, + "comment": "Added models for events from AzureDevOps on git.push", + "url": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/b8a410b1b75ecb203fb5dda54adce2f9d2c87a27" + }, + { + "commitId": "cdebf445da23c0c2d501cb46d4c496e37b40a6fe", + "author": { + "name": "Maxwell Weru", + "email": "mburumaxwell@gmail.com", + "date": "2020-12-23T05:39:43Z" + }, + "committer": { + "name": "Maxwell Weru", + "email": "mburumaxwell@gmail.com", + "date": "2020-12-23T05:39:43Z" + }, + "comment": "Update IEventBusPublisher to IEventPublisher", + "url": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/commits/cdebf445da23c0c2d501cb46d4c496e37b40a6fe" + } + ], + "refUpdates": [ + { + "name": "refs/heads/main", + "oldObjectId": "dd6ba920a4b4243162033737c0a1abbd937f4c40", + "newObjectId": "b8a410b1b75ecb203fb5dda54adce2f9d2c87a27" + } + ], + "repository": { + "id": "d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "name": "dependabot-sample", + "url": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c", + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "name": "Core", + "url": "https://dev.azure.com/dependabot/_apis/projects/cea8cb01-dd13-4588-b27a-55fa170e4e94", + "state": "wellFormed", + "visibility": "unchanged", + "lastUpdateTime": "0001-01-01T00:00:00" + }, + "defaultBranch": "refs/heads/main", + "remoteUrl": "https://dev.azure.com/dependabot/Core/_git/dependabot-sample" + }, + "pushedBy": { + "displayName": "Maxwell Weru", + "url": "https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "_links": { + "avatar": { + "href": "https://dev.azure.com/dependabot/_apis/GraphProfile/MemberAvatars/aad.ZGRjNDViZTAtZjFmZS03MDQyLTg3YWMtZTZkYmJkYmMzYjQ1" + } + }, + "id": "ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "uniqueName": "example@contoso.com", + "imageUrl": "https://dev.azure.com/dependabot/_api/_common/identityImage?id=ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45", + "descriptor": "aad.ZGRjNDViZTAtZjFmZS03MDQyLTg3YWMtZTZkYmJkYmMzYjQ1" + }, + "pushId": 34772, + "date": "2020-12-23T05:47:12.0015512Z", + "url": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pushes/34772", + "_links": { + "self": { + "href": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pushes/34772" + }, + "repository": { + "href": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c" + }, + "commits": { + "href": "https://dev.azure.com/dependabot/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/pushes/34772/commits" + }, + "pusher": { + "href": 
"https://spsprodweu2.vssps.visualstudio.com/A1f76e76f-721c-4b92-8ff3-bd07abce2671/_apis/Identities/ddc45be0-f1fe-6042-87ac-e6dbbdbc3b45" + }, + "refs": { + "href": "https://dev.azure.com/dependabot/cea8cb01-dd13-4588-b27a-55fa170e4e94/_apis/git/repositories/d5bb1147-bd9f-4ae1-8554-aec3d164f94c/refs/heads/main" + } + } + }, + "resourceVersion": "1.0", + "resourceContainers": { + "collection": { + "id": "3fba1bb9-6e8c-4087-b435-29157f94e9a1", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "account": { + "id": "1f76e76f-721c-4b92-8ff3-bd07abce2671", + "baseUrl": "https://dev.azure.com/dependabot/" + }, + "project": { + "id": "cea8cb01-dd13-4588-b27a-55fa170e4e94", + "baseUrl": "https://dev.azure.com/dependabot/" + } + }, + "createdDate": "2020-12-23T05:47:19.8108134Z" +} \ No newline at end of file diff --git a/server/Tingle.Dependabot.Tests/Samples/sample-registries.yml b/server/Tingle.Dependabot.Tests/Samples/sample-registries.yml new file mode 100644 index 00000000..b2ce4ca0 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Samples/sample-registries.yml @@ -0,0 +1,59 @@ +version: 2 +registries: + composer: + type: composer-repository + url: https://repo.packagist.com/example-company/ + username: octocat + password: 'pwd_1234567890' + dockerhub: + type: docker-registry + url: https://registry.hub.docker.com + username: octocat + password: 'pwd_1234567890' + replaces-base: true + github-octocat: + type: git + url: https://github.com + username: x-access-token + password: 'pwd_1234567890' + github-hex-org: + type: hex-organization + organization: github + key: 'key_1234567890' + github-hex-repository: + type: hex-repository + repo: private-repo + url: https://private-repo.example.com + auth-key: 'ak_1234567890' + public-key-fingerprint: 'pkf_1234567890' + maven-artifactory: + type: maven-repository + url: https://artifactory.example.com + username: octocat + password: 'pwd_1234567890' + replaces-base: true + npm-github: + type: npm-registry + url: https://npm.pkg.github.com + token: 'tkn_1234567890' + replaces-base: true + nuget-azure-devops: + type: nuget-feed + url: https://pkgs.dev.azure.com/contoso/_packaging/My_Feed/nuget/v3/index.json + username: octocat@example.com + password: 'pwd_1234567890' + python-azure: + type: python-index + url: https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example + username: octocat@example.com + password: 'pwd_1234567890' + replaces-base: true + ruby-github: + type: rubygems-server + url: https://rubygems.pkg.github.com/octocat/github_api + token: 'tkn_1234567890' + replaces-base: false + terraform-example: + type: terraform-registry + url: https://terraform.example.com + token: 'tkn_1234567890' diff --git a/server/Tingle.Dependabot.Tests/TestSamples.cs b/server/Tingle.Dependabot.Tests/TestSamples.cs new file mode 100644 index 00000000..04d318cb --- /dev/null +++ b/server/Tingle.Dependabot.Tests/TestSamples.cs @@ -0,0 +1,24 @@ +using Tingle.Extensions.Processing; + +namespace Tingle.Dependabot.Tests; + +internal class TestSamples +{ + private const string FolderNameSamples = "Samples"; + + private static Stream GetAsStream(string fileName) + => EmbeddedResourceHelper.GetResourceAsStream(FolderNameSamples, fileName)!; + + public static Stream GetAzureDevOpsGitPush1() => GetAsStream("git.push-1.json"); + public static Stream GetAzureDevOpsGitPush2() => GetAsStream("git.push-2.json"); + public static Stream GetAzureDevOpsGitPush3() => GetAsStream("git.push-3.json"); + public static Stream GetAzureDevOpsPullRequestUpdated1() => 
GetAsStream("git.pullrequest.updated-1.json"); + public static Stream GetAzureDevOpsPullRequestUpdated2() => GetAsStream("git.pullrequest.updated-2.json"); + public static Stream GetAzureDevOpsPullRequestMerged1() => GetAsStream("git.pullrequest.merged-1.json"); + public static Stream GetAzureDevOpsPullRequestMerged2() => GetAsStream("git.pullrequest.merged-2.json"); + public static Stream GetAzureDevOpsPullRequestCommentEvent1() => GetAsStream("git-pullrequest-comment-event-1.json"); + public static Stream GetAzureDevOpsPullRequestCommentEvent2() => GetAsStream("git-pullrequest-comment-event-2.json"); + + public static Stream GetDependabot() => GetAsStream("dependabot.yml"); + public static Stream GetSampleRegistries() => GetAsStream("sample-registries.yml"); +} diff --git a/server/Tingle.Dependabot.Tests/Tingle.Dependabot.Tests.csproj b/server/Tingle.Dependabot.Tests/Tingle.Dependabot.Tests.csproj new file mode 100644 index 00000000..05426061 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Tingle.Dependabot.Tests.csproj @@ -0,0 +1,33 @@ + + + + 11.0 + enable + enable + net7.0 + true + false + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/server/Tingle.Dependabot.Tests/Workflow/AzureDevOpsProjectUrlTests.cs b/server/Tingle.Dependabot.Tests/Workflow/AzureDevOpsProjectUrlTests.cs new file mode 100644 index 00000000..c93bba37 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Workflow/AzureDevOpsProjectUrlTests.cs @@ -0,0 +1,76 @@ +using System.ComponentModel; +using Tingle.Dependabot.Workflow; +using Xunit; + +namespace Tingle.Dependabot.Tests.Workflow; + +public class AzureDevOpsProjectUrlTests +{ + [Theory] + [InlineData("https://dev.azure.com/dependabot/Core", "dev.azure.com", "dependabot", "https://dev.azure.com/dependabot/", "Core", false)] + [InlineData("https://dev.azure.com/dependabot/_apis/projects/Core", "dev.azure.com", "dependabot", "https://dev.azure.com/dependabot/", "Core", false)] + [InlineData("https://dev.azure.com/dependabot/_apis/projects/cea8cb01-dd13-4588-b27a-55fa170e4e94", "dev.azure.com", "dependabot", "https://dev.azure.com/dependabot/", "cea8cb01-dd13-4588-b27a-55fa170e4e94", true)] + [InlineData("https://dependabot.visualstudio.com/Core", "dependabot.visualstudio.com", "dependabot", "https://dependabot.visualstudio.com/", "Core", false)] + public void Creation_WithParsing_Works(string projectUrl, string hostname, string organizationName, string organizationUrl, string projectIdOrName, bool usesProjectId) + { + var url = (AzureDevOpsProjectUrl)projectUrl; + Assert.Equal(hostname, url.Hostname); + Assert.Equal(organizationName, url.OrganizationName); + Assert.Equal(organizationUrl, url.OrganizationUrl); + Assert.Equal(projectIdOrName, url.ProjectIdOrName); + if (usesProjectId) + { + Assert.NotNull(url.ProjectId); + Assert.Null(url.ProjectName); + } + else + { + Assert.Null(url.ProjectId); + Assert.NotNull(url.ProjectName); + } + } + + [Theory] + [InlineData("https://dev.azure.com/dependabot/Core/", "dependabot-sample", "dependabot/Core/_git/dependabot-sample")] + [InlineData("https://dependabot.visualstudio.com/Core", "dependabot-sample", "dependabot/Core/_git/dependabot-sample")] + public void MakeRepositorySlug_Works_For_Azure(string projectUrl, string repoName, string expected) + { + var url = (AzureDevOpsProjectUrl)projectUrl; + var actual = url.MakeRepositorySlug(repoName); + Assert.Equal(expected, actual); + } + + [Fact] + public void ConvertsToUriOrString() + { + var converter = TypeDescriptor.GetConverter(typeof(AzureDevOpsProjectUrl)); + 
Assert.NotNull(converter); + var url = new AzureDevOpsProjectUrl("https://dependabot.visualstudio.com/Core"); + + var actual = converter.ConvertTo(url, typeof(string)); + Assert.Equal(actual, "https://dependabot.visualstudio.com/Core"); + + actual = converter.ConvertTo(url, typeof(Uri)); + Assert.Equal(actual, new Uri("https://dependabot.visualstudio.com/Core")); + + actual = converter.ConvertToString(url); + Assert.Equal("https://dependabot.visualstudio.com/Core", actual); + } + + [Fact] + public void ConvertsFromUriOrString() + { + var expected = new AzureDevOpsProjectUrl("https://dependabot.visualstudio.com/Core"); + var converter = TypeDescriptor.GetConverter(typeof(AzureDevOpsProjectUrl)); + Assert.NotNull(converter); + + var actual = Assert.IsType(converter.ConvertFrom(null, null, new Uri("https://dependabot.visualstudio.com/Core"))); + Assert.Equal(expected, actual); + + actual = Assert.IsType(converter.ConvertFrom(null, null, "https://dependabot.visualstudio.com/Core")); + Assert.Equal(expected, actual); + + actual = Assert.IsType(converter.ConvertFromString("https://dependabot.visualstudio.com/Core")); + Assert.Equal(expected, actual); + } +} diff --git a/server/Tingle.Dependabot.Tests/Workflow/UpdateRunnerTests.cs b/server/Tingle.Dependabot.Tests/Workflow/UpdateRunnerTests.cs new file mode 100644 index 00000000..ea5d0719 --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Workflow/UpdateRunnerTests.cs @@ -0,0 +1,237 @@ +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; +using Xunit; +using Xunit.Abstractions; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace Tingle.Dependabot.Tests.Workflow; + +public class UpdateRunnerTests +{ + private readonly ITestOutputHelper outputHelper; + + public UpdateRunnerTests(ITestOutputHelper outputHelper) + { + this.outputHelper = outputHelper ?? 
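
The parsing behavior pinned down by the tests above (organization and project extracted from both the dev.azure.com/{organization}/{project} form and the {organization}.visualstudio.com/{project} form, with GUID project identifiers recognised separately, and repository slugs built as {organization}/{project}/_git/{repository}) can be summarised with a small sketch. This is an illustrative stand-in assumed from the test data, not the project's actual AzureDevOpsProjectUrl implementation.

using System;

// Minimal sketch of the parsing rules implied by the tests above (assumed, not the real type).
readonly record struct ProjectUrlSketch(string Hostname, string OrganizationName, string OrganizationUrl, string ProjectIdOrName)
{
    public static ProjectUrlSketch Parse(string value)
    {
        var uri = new Uri(value);
        var host = uri.Host;
        var segments = uri.AbsolutePath.Trim('/').Split('/', StringSplitOptions.RemoveEmptyEntries);

        string organization, projectIdOrName;
        if (host.Equals("dev.azure.com", StringComparison.OrdinalIgnoreCase))
        {
            // https://dev.azure.com/{organization}/{project} or .../{organization}/_apis/projects/{project}
            organization = segments[0];
            projectIdOrName = segments[^1];
        }
        else
        {
            // https://{organization}.visualstudio.com/{project}
            organization = host.Split('.')[0];
            projectIdOrName = segments[^1];
        }

        var organizationUrl = host.Equals("dev.azure.com", StringComparison.OrdinalIgnoreCase)
            ? $"https://dev.azure.com/{organization}/"
            : $"https://{host}/";

        return new ProjectUrlSketch(host, organization, organizationUrl, projectIdOrName);
    }

    // A GUID means the project id was supplied instead of its name.
    public bool UsesProjectId => Guid.TryParse(ProjectIdOrName, out _);

    // Mirrors MakeRepositorySlug in the tests: {organization}/{project}/_git/{repository}.
    public string MakeRepositorySlug(string repository) => $"{OrganizationName}/{ProjectIdOrName}/_git/{repository}";
}
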
throw new ArgumentNullException(nameof(outputHelper)); + } + + [Fact] + public void MakeExtraCredentials_Works_1() + { + using var stream = TestSamples.GetSampleRegistries(); + using var reader = new StreamReader(stream); + + var deserializer = new DeserializerBuilder().WithNamingConvention(HyphenatedNamingConvention.Instance) + .IgnoreUnmatchedProperties() + .Build(); + + var configuration = deserializer.Deserialize(reader); + Assert.NotNull(configuration?.Registries); + var registries = UpdateRunner.MakeExtraCredentials(configuration.Registries.Values, new Dictionary()); + Assert.NotNull(registries); + Assert.Equal(11, registries.Count); + + // composer-repository + var registry = registries[0]; + Assert.Equal("composer_repository", Assert.Contains("type", registry)); + Assert.Equal("https://repo.packagist.com/example-company/", Assert.Contains("url", registry)); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.DoesNotContain("token", registry); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.Equal("octocat", Assert.Contains("username", registry)); + Assert.Equal("pwd_1234567890", Assert.Contains("password", registry)); + Assert.DoesNotContain("replaces-base", registry); + + // docker-registry + registry = registries[1]; + Assert.Equal("docker_registry", Assert.Contains("type", registry)); + Assert.DoesNotContain("url", registry); + Assert.Equal("registry.hub.docker.com", Assert.Contains("registry", registry)); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.DoesNotContain("token", registry); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.Equal("octocat", Assert.Contains("username", registry)); + Assert.Equal("pwd_1234567890", Assert.Contains("password", registry)); + Assert.Equal("true", Assert.Contains("replaces-base", registry)); + + // git + registry = registries[2]; + Assert.Equal("git", Assert.Contains("type", registry)); + Assert.Equal("https://github.com", Assert.Contains("url", registry)); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.DoesNotContain("token", registry); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.Equal("x-access-token", Assert.Contains("username", registry)); + Assert.Equal("pwd_1234567890", Assert.Contains("password", registry)); + Assert.DoesNotContain("replaces-base", registry); + + // hex-organization + registry = registries[3]; + Assert.Equal("hex_organization", Assert.Contains("type", registry)); + Assert.DoesNotContain("url", registry); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.Equal("key_1234567890", Assert.Contains("key", registry)); + Assert.DoesNotContain("token", registry); + Assert.Equal("github", Assert.Contains("organization", registry)); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", 
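
The assertions in this test describe the shaping applied when configured registries become extra credentials for the updater: the registry type is underscored, docker/npm/terraform style entries expose a scheme-less registry or host instead of a url, booleans travel as strings, and absent values are omitted. A rough sketch of that shaping follows; the parameter list and helper names are assumptions chosen to mirror the sample YAML, not the signature of UpdateRunner.MakeExtraCredentials.

using System;
using System.Collections.Generic;

static class RegistryMappingSketch
{
    // Shapes one configured registry into the string dictionary the updater expects (illustrative only).
    public static Dictionary<string, string> ToCredential(string type, string? url, string? username, string? password, string? token, bool? replacesBase)
    {
        var values = new Dictionary<string, string>
        {
            ["type"] = type.Replace("-", "_"), // e.g. "nuget-feed" -> "nuget_feed"
        };

        // Some types want the bare host ("registry"/"host") rather than the full URL.
        if (type is "docker-registry" or "npm-registry")
        {
            if (url is not null)
            {
                var u = new Uri(url);
                values["registry"] = u.Host + u.AbsolutePath.TrimEnd('/');
            }
        }
        else if (type is "terraform-registry")
        {
            if (url is not null) values["host"] = new Uri(url).Host;
        }
        else
        {
            AddIfPresent(values, "url", url);
        }

        AddIfPresent(values, "username", username);
        AddIfPresent(values, "password", password);
        AddIfPresent(values, "token", token);
        if (replacesBase is true) values["replaces-base"] = "true"; // booleans are emitted as strings

        return values;
    }

    static void AddIfPresent(Dictionary<string, string> target, string key, string? value)
    {
        if (!string.IsNullOrWhiteSpace(value)) target[key] = value;
    }
}
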
registry); + Assert.DoesNotContain("username", registry); + Assert.DoesNotContain("password", registry); + Assert.DoesNotContain("replaces-base", registry); + + // hex-repository + registry = registries[4]; + Assert.Equal("hex_repository", Assert.Contains("type", registry)); + Assert.Equal("https://private-repo.example.com", Assert.Contains("url", registry)); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.DoesNotContain("token", registry); + Assert.DoesNotContain("organization", registry); + Assert.Equal("private-repo", Assert.Contains("repo", registry)); + Assert.Equal("ak_1234567890", Assert.Contains("auth-key", registry)); + Assert.Equal("pkf_1234567890", Assert.Contains("public-key-fingerprint", registry)); + Assert.DoesNotContain("username", registry); + Assert.DoesNotContain("password", registry); + Assert.DoesNotContain("replaces-base", registry); + + // maven-repository + registry = registries[5]; + Assert.Equal("maven_repository", Assert.Contains("type", registry)); + Assert.Equal("https://artifactory.example.com", Assert.Contains("url", registry)); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.DoesNotContain("token", registry); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.Equal("octocat", Assert.Contains("username", registry)); + Assert.Equal("pwd_1234567890", Assert.Contains("password", registry)); + Assert.Equal("true", Assert.Contains("replaces-base", registry)); + + // npm-registry + registry = registries[6]; + Assert.Equal("npm_registry", Assert.Contains("type", registry)); + Assert.DoesNotContain("url", registry); + Assert.Equal("npm.pkg.github.com", Assert.Contains("registry", registry)); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.Equal("tkn_1234567890", Assert.Contains("token", registry)); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.DoesNotContain("username", registry); + Assert.DoesNotContain("password", registry); + Assert.Equal("true", Assert.Contains("replaces-base", registry)); + + // nuget-feed + registry = registries[7]; + Assert.Equal("nuget_feed", Assert.Contains("type", registry)); + Assert.Equal("https://pkgs.dev.azure.com/contoso/_packaging/My_Feed/nuget/v3/index.json", Assert.Contains("url", registry)); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.DoesNotContain("token", registry); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.Equal("octocat@example.com", Assert.Contains("username", registry)); + Assert.Equal("pwd_1234567890", Assert.Contains("password", registry)); + Assert.DoesNotContain("replaces-base", registry); + + // python-index + registry = registries[8]; + Assert.Equal("python_index", Assert.Contains("type", registry)); + Assert.Equal("https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example", Assert.Contains("url", 
registry)); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.DoesNotContain("token", registry); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.Equal("octocat@example.com", Assert.Contains("username", registry)); + Assert.Equal("pwd_1234567890", Assert.Contains("password", registry)); + Assert.Equal("true", Assert.Contains("replaces-base", registry)); + + // rubygems-server + registry = registries[9]; + Assert.Equal("rubygems_server", Assert.Contains("type", registry)); + Assert.Equal("https://rubygems.pkg.github.com/octocat/github_api", Assert.Contains("url", registry)); + Assert.DoesNotContain("registry", registry); + Assert.DoesNotContain("host", registry); + Assert.DoesNotContain("key", registry); + Assert.Equal("tkn_1234567890", Assert.Contains("token", registry)); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.DoesNotContain("username", registry); + Assert.DoesNotContain("password", registry); + Assert.DoesNotContain("replaces-base", registry); + + // terraform-registry + registry = registries[10]; + Assert.Equal("terraform_registry", Assert.Contains("type", registry)); + Assert.DoesNotContain("url", registry); + Assert.DoesNotContain("registry", registry); + Assert.Equal("terraform.example.com", Assert.Contains("host", registry)); + Assert.DoesNotContain("key", registry); + Assert.Equal("tkn_1234567890", Assert.Contains("token", registry)); + Assert.DoesNotContain("organization", registry); + Assert.DoesNotContain("repo", registry); + Assert.DoesNotContain("auth-key", registry); + Assert.DoesNotContain("public-key-fingerprint", registry); + Assert.DoesNotContain("username", registry); + Assert.DoesNotContain("password", registry); + Assert.DoesNotContain("replaces-base", registry); + } + + [Fact] + public void ConvertPlaceholder_Works() + { + var input = ":${{MY-p_aT}}"; + var secrets = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["my-p_at"] = "cake", + }; + var result = UpdateRunner.ConvertPlaceholder(input, secrets); + Assert.Equal(":cake", result); + } + + [Theory] + [InlineData("contoso.azurecr.io/tinglesoftware/dependabot-updater:0.11", true, "contoso.azurecr.io")] + [InlineData("fabrikam.azurecr.io/tinglesoftware/dependabot-updater:0.11", true, "fabrikam.azurecr.io")] + [InlineData("dependabot.azurecr.io/tinglesoftware/dependabot-updater:0.11", true, "dependabot.azurecr.io")] + [InlineData("ghcr.io/tinglesoftware/dependabot-updater:0.11", false, null)] + [InlineData("tingle/dependabot-updater:0.11", false, null)] + [InlineData("tingle/dependabot-azure-devops:0.11", false, null)] + public void TryGetAzureContainerRegistry_Works(string input, bool matches, string? 
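
ConvertPlaceholder_Works below expects ${{NAME}} tokens to be replaced with values from a secrets dictionary, matching names case-insensitively. A minimal regex-based sketch of that substitution, assuming this token syntax is the whole story, looks like the following; it is not the project's implementation.

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

static class PlaceholderSketch
{
    // Replaces ${{ NAME }} style placeholders with values from the given secrets, case-insensitively.
    public static string Convert(string input, IReadOnlyDictionary<string, string> secrets)
    {
        return Regex.Replace(input, @"\$\{\{\s*([A-Za-z0-9_-]+)\s*\}\}", match =>
        {
            var name = match.Groups[1].Value;
            foreach (var (key, value) in secrets)
            {
                if (string.Equals(key, name, StringComparison.OrdinalIgnoreCase)) return value;
            }
            return match.Value; // leave unknown placeholders untouched
        });
    }
}

// Usage mirroring the test: Convert(":${{MY-p_aT}}", new Dictionary<string, string> { ["my-p_at"] = "cake" }) returns ":cake".
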
expected) + { + var found = UpdateRunner.TryGetAzureContainerRegistry(input, out var actual); + Assert.Equal(matches, found); + Assert.Equal(expected, actual); + } +} diff --git a/server/Tingle.Dependabot.Tests/Workflow/WorkflowBackgroundServiceTests.cs b/server/Tingle.Dependabot.Tests/Workflow/WorkflowBackgroundServiceTests.cs new file mode 100644 index 00000000..175b355d --- /dev/null +++ b/server/Tingle.Dependabot.Tests/Workflow/WorkflowBackgroundServiceTests.cs @@ -0,0 +1,268 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; +using Tingle.EventBus; +using Tingle.EventBus.Transports.InMemory; +using Tingle.Extensions; +using Xunit; +using Xunit.Abstractions; + +namespace Tingle.Dependabot.Tests.Workflow; + +public class WorkflowBackgroundServiceTests +{ + private const string RepositoryId = "repo_1234567890"; + private const int UpdateId1 = 1; + + private readonly ITestOutputHelper outputHelper; + + public WorkflowBackgroundServiceTests(ITestOutputHelper outputHelper) + { + this.outputHelper = outputHelper ?? throw new ArgumentNullException(nameof(outputHelper)); + } + + [Fact] + public async Task SynchronizationInnerAsync_Works() + { + await TestAsync(async (harness, context, service) => + { + await service.SynchronizationInnerAsync(); + + // Ensure the message was published + var evt_context = Assert.IsType>(Assert.Single(await harness.PublishedAsync())); + var inner = evt_context.Event; + Assert.NotNull(inner); + Assert.Null(inner.RepositoryId); + Assert.Null(inner.RepositoryProviderId); + Assert.False(inner.Trigger); + }); + } + + [Fact] + public async Task CheckMissedTriggerInnerAsync_MissedScheduleIsDetected() + { + var referencePoint = DateTimeOffset.Parse("2023-01-24T05:00:00+00:00"); + var lastUpdate0 = DateTimeOffset.Parse("2023-01-24T03:45:00+00:00"); + var lastUpdate1 = DateTimeOffset.Parse("2023-01-23T03:30:00+00:00"); + await TestAsync(lastUpdate0, lastUpdate1, async (harness, context, service) => + { + await service.CheckMissedTriggerInnerAsync(referencePoint); + + // Ensure the message was published + var evt_context = Assert.IsType>(Assert.Single(await harness.PublishedAsync())); + var inner = evt_context.Event; + Assert.NotNull(inner); + Assert.Equal(RepositoryId, inner.RepositoryId); + Assert.Equal(UpdateId1, inner.RepositoryUpdateId); + Assert.Equal(UpdateJobTrigger.MissedSchedule, inner.Trigger); + }); + } + + [Fact] + public async Task CheckMissedTriggerInnerAsync_MissedScheduleIsDetected_NotRun_Before() + { + var referencePoint = DateTimeOffset.Parse("2023-01-24T05:00:00+00:00"); + var lastUpdate0 = DateTimeOffset.Parse("2023-01-24T03:45:00+00:00"); + var lastUpdate1 = (DateTimeOffset?)null; + await TestAsync(lastUpdate0, lastUpdate1, async (harness, context, service) => + { + await service.CheckMissedTriggerInnerAsync(referencePoint); + + // Ensure the message was published + var evt_context = Assert.IsType>(Assert.Single(await harness.PublishedAsync())); + var inner = evt_context.Event; + Assert.NotNull(inner); + Assert.Equal(RepositoryId, inner.RepositoryId); + Assert.Equal(UpdateId1, inner.RepositoryUpdateId); + Assert.Equal(UpdateJobTrigger.MissedSchedule, inner.Trigger); + }); + } + + [Fact] + public async Task CheckMissedTriggerInnerAsync_NoMissedSchedule() + { + var referencePoint = DateTimeOffset.Parse("2023-01-24T05:00:00+00:00"); + var 
lastUpdate0 = DateTimeOffset.Parse("2023-01-24T03:45:00+00:00"); + var lastUpdate1 = DateTimeOffset.Parse("2023-01-24T03:30:00+00:00"); + await TestAsync(lastUpdate0, lastUpdate1, async (harness, context, service) => + { + await service.CheckMissedTriggerInnerAsync(referencePoint); + + // Ensure nothing was published + Assert.Empty(await harness.PublishedAsync()); + }); + } + + + [Fact] + public async Task CleanupInnerAsync_ResolvesJobs() + { + await TestAsync(async (harness, context, job) => + { + var targetId = Guid.NewGuid().ToString(); + await context.UpdateJobs.AddAsync(new UpdateJob + { + Id = Guid.NewGuid().ToString(), + RepositoryId = RepositoryId, + RepositorySlug = "test-repo", + Created = DateTimeOffset.UtcNow.AddMinutes(-19), + Directory = "/", + Resources = new(0.25, 0.2), + AuthKey = Keygen.Create(25), + Status = UpdateJobStatus.Succeeded, + }); + await context.UpdateJobs.AddAsync(new UpdateJob + { + Id = Guid.NewGuid().ToString(), + RepositoryId = RepositoryId, + RepositorySlug = "test-repo", + Created = DateTimeOffset.UtcNow.AddHours(-100), + Directory = "/", + Resources = new(0.25, 0.2), + AuthKey = Keygen.Create(25), + Status = UpdateJobStatus.Succeeded, + }); + await context.UpdateJobs.AddAsync(new UpdateJob + { + Id = targetId, + RepositoryId = RepositoryId, + RepositorySlug = "test-repo", + Created = DateTimeOffset.UtcNow.AddMinutes(-30), + Directory = "/", + Resources = new(0.25, 0.2), + AuthKey = Keygen.Create(25), + Status = UpdateJobStatus.Running, + }); + await context.SaveChangesAsync(); + + await job.CleanupInnerAsync(); + + // Ensure the message was published + var evt_context = Assert.IsType>(Assert.Single(await harness.PublishedAsync())); + var inner = evt_context.Event; + Assert.NotNull(inner); + Assert.Equal(targetId, inner.JobId); + }); + } + + [Fact] + public async Task CleanupInnerAsync_DeletesOldJobsAsync() + { + await TestAsync(async (harness, context, job) => + { + await context.UpdateJobs.AddAsync(new UpdateJob + { + Id = Guid.NewGuid().ToString(), + RepositoryId = RepositoryId, + RepositorySlug = "test-repo", + Created = DateTimeOffset.UtcNow.AddDays(-80), + Directory = "/", + Resources = new(0.25, 0.2), + AuthKey = Keygen.Create(25), + }); + await context.UpdateJobs.AddAsync(new UpdateJob + { + Id = Guid.NewGuid().ToString(), + RepositoryId = RepositoryId, + RepositorySlug = "test-repo", + Created = DateTimeOffset.UtcNow.AddDays(-100), + Directory = "/", + Resources = new(0.25, 0.2), + AuthKey = Keygen.Create(25), + }); + await context.UpdateJobs.AddAsync(new UpdateJob + { + Id = Guid.NewGuid().ToString(), + RepositoryId = RepositoryId, + RepositorySlug = "test-repo", + Created = DateTimeOffset.UtcNow.AddDays(-120), + Directory = "/", + Resources = new(0.25, 0.2), + AuthKey = Keygen.Create(25), + }); + await context.SaveChangesAsync(); + + await job.CleanupInnerAsync(); + Assert.Equal(1, await context.UpdateJobs.CountAsync()); + }); + } + + private Task TestAsync(Func executeAndVerify) => TestAsync(null, null, executeAndVerify); + + private async Task TestAsync(DateTimeOffset? lastUpdate0, DateTimeOffset? 
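
The three CheckMissedTriggerInnerAsync cases above amount to comparing each update's LatestUpdate against the schedule occurrence that should already have happened by the reference point. A simplified sketch of that decision for a daily schedule follows; the real service presumably derives occurrences from DependabotUpdateSchedule, so the helper below is only an assumed illustration.

using System;

static class MissedScheduleSketch
{
    // For a daily schedule at the given time-of-day (UTC), returns the most recent
    // occurrence at or before the reference point.
    public static DateTimeOffset LastOccurrence(TimeSpan timeOfDay, DateTimeOffset reference)
    {
        var candidate = new DateTimeOffset(reference.Date + timeOfDay, TimeSpan.Zero);
        return candidate <= reference ? candidate : candidate.AddDays(-1);
    }

    // An update has missed its schedule when it has never run, or last ran before the
    // occurrence that should already have happened.
    public static bool IsMissed(DateTimeOffset? latestUpdate, TimeSpan timeOfDay, DateTimeOffset reference)
    {
        var expected = LastOccurrence(timeOfDay, reference);
        return latestUpdate is null || latestUpdate.Value < expected;
    }
}

// With the values used in the tests above:
//   IsMissed(2023-01-24T03:45Z, 03:45, 2023-01-24T05:00Z) -> false (ran on time)
//   IsMissed(2023-01-23T03:30Z, 03:30, 2023-01-24T05:00Z) -> true  (missed 2023-01-24T03:30Z)
//   IsMissed(null,              03:30, 2023-01-24T05:00Z) -> true  (never ran)
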
lastUpdate1, Func executeAndVerify) + { + var host = Host.CreateDefaultBuilder() + .ConfigureLogging(builder => builder.AddXUnit(outputHelper)) + .ConfigureServices((context, services) => + { + var dbName = Guid.NewGuid().ToString(); + services.AddDbContext(options => + { + options.UseInMemoryDatabase(dbName, o => o.EnableNullChecks()); + options.EnableDetailedErrors(); + }); + services.AddEventBus(builder => builder.AddInMemoryTransport().AddInMemoryTestHarness()); + }) + .Build(); + + using var scope = host.Services.CreateScope(); + var provider = scope.ServiceProvider; + + var context = provider.GetRequiredService(); + await context.Database.EnsureCreatedAsync(); + + await context.Repositories.AddAsync(new Repository + { + Id = RepositoryId, + Name = "test-repo", + ConfigFileContents = "", + Updates = new List + { + new RepositoryUpdate + { + PackageEcosystem = DependabotPackageEcosystem.Npm, + Directory = "/", + Schedule = new DependabotUpdateSchedule + { + Interval = DependabotScheduleInterval.Daily, + Time = new(3, 45), + }, + LatestUpdate = lastUpdate0, + }, + new RepositoryUpdate + { + PackageEcosystem = DependabotPackageEcosystem.Npm, + Directory = "/legacy", + Schedule = new DependabotUpdateSchedule + { + Interval = DependabotScheduleInterval.Daily, + Time = new(3, 30), + }, + LatestUpdate = lastUpdate1, + }, + }, + }); + await context.SaveChangesAsync(); + + var harness = provider.GetRequiredService(); + await harness.StartAsync(); + + try + { + var service = ActivatorUtilities.GetServiceOrCreateInstance(provider); + + await executeAndVerify(harness, context, service); + + // Ensure there were no publish failures + Assert.Empty(await harness.FailedAsync()); + } + finally + { + await harness.StopAsync(); + } + } +} diff --git a/server/Tingle.Dependabot/ApiKeyProvider.cs b/server/Tingle.Dependabot/ApiKeyProvider.cs new file mode 100644 index 00000000..17923cf9 --- /dev/null +++ b/server/Tingle.Dependabot/ApiKeyProvider.cs @@ -0,0 +1,41 @@ +using AspNetCore.Authentication.ApiKey; +using Microsoft.EntityFrameworkCore; +using System.Security.Claims; +using Tingle.Dependabot.Models; + +namespace Tingle.Dependabot; + +internal class ApiKeyProvider : IApiKeyProvider +{ + private readonly MainDbContext dbContext; + + public ApiKeyProvider(MainDbContext dbContext) + { + this.dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext)); + } + + public async Task ProvideAsync(string key) + { + var job = await dbContext.UpdateJobs.SingleOrDefaultAsync(j => j.AuthKey == key); + if (job is not null) + { + return new ApiKey(key, job.RepositoryId!); + } + + return null; + } + + class ApiKey : IApiKey + { + public ApiKey(string key, string owner, IReadOnlyCollection? claims = null) + { + Key = key; + OwnerName = owner; + Claims = claims ?? 
new List(); + } + + public string Key { get; } + public string OwnerName { get; } + public IReadOnlyCollection Claims { get; } + } +} diff --git a/server/Tingle.Dependabot/AppSetup.cs b/server/Tingle.Dependabot/AppSetup.cs new file mode 100644 index 00000000..1e586914 --- /dev/null +++ b/server/Tingle.Dependabot/AppSetup.cs @@ -0,0 +1,50 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Options; +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; + +namespace Tingle.Dependabot; + +internal static class AppSetup +{ + public static async Task SetupAsync(WebApplication app, CancellationToken cancellationToken = default) + { + using var scope = app.Services.CreateScope(); + var provider = scope.ServiceProvider; + + // perform migrations on startup if asked to + if (app.Configuration.GetValue("EFCORE_PERFORM_MIGRATIONS")) + { + var db = provider.GetRequiredService().Database; + if (db.IsRelational()) // only relational databases + { + await db.MigrateAsync(cancellationToken: cancellationToken); + } + } + + var options = provider.GetRequiredService>().Value; + if (options.SynchronizeOnStartup) + { + var synchronizer = provider.GetRequiredService(); + await synchronizer.SynchronizeAsync(false, cancellationToken); /* database sync should not trigger, just in case it's too many */ + } + + if (options.LoadSchedulesOnStartup) + { + var dbContext = provider.GetRequiredService(); + var repositories = await dbContext.Repositories.ToListAsync(cancellationToken); + var scheduler = provider.GetRequiredService(); + foreach (var repository in repositories) + { + await scheduler.CreateOrUpdateAsync(repository, cancellationToken); + } + } + + // create or update webhooks/subscriptions if asked to + if (options.CreateOrUpdateWebhooksOnStartup) + { + var adoProvider = provider.GetRequiredService(); + await adoProvider.CreateOrUpdateSubscriptionsAsync(cancellationToken); + } + } +} diff --git a/server/Tingle.Dependabot/AzureDevOpsEvent.cs b/server/Tingle.Dependabot/AzureDevOpsEvent.cs new file mode 100644 index 00000000..8b781648 --- /dev/null +++ b/server/Tingle.Dependabot/AzureDevOpsEvent.cs @@ -0,0 +1,239 @@ +using System.ComponentModel.DataAnnotations; +using System.Runtime.Serialization; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; + +namespace Tingle.Dependabot; + +public class AzureDevOpsEvent +{ + [Required] + [JsonPropertyName("subscriptionId")] + public string? SubscriptionId { get; set; } + + [Required] + [JsonPropertyName("notificationId")] + public int NotificationId { get; set; } + + [Required] + [JsonPropertyName("eventType")] + public AzureDevOpsEventType? EventType { get; set; } + + [Required] + [JsonPropertyName("resource")] + public JsonObject? Resource { get; set; } +} + +public class AzureDevOpsEventCodePushResource +{ + /// + /// List of updated references. + /// + [Required] + [JsonPropertyName("refUpdates")] + public List? RefUpdates { get; set; } + + /// + /// Details about the repository. + /// + [Required] + [JsonPropertyName("repository")] + public AzureDevOpsEventRepository? Repository { get; set; } +} + +public class AzureDevOpsEventPullRequestResource +{ + /// + /// Details about the repository. + /// + [Required] + [JsonPropertyName("repository")] + public AzureDevOpsEventRepository? Repository { get; set; } + + /// + /// The identifier of the Pull Request. + /// + [Required] + [JsonPropertyName("pullRequestId")] + public int PullRequestId { get; set; } + + /// + /// The status of the Pull Request. 
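
SetupAsync above gates its startup work on configuration: EFCORE_PERFORM_MIGRATIONS for relational migrations, plus SynchronizeOnStartup, LoadSchedulesOnStartup and CreateOrUpdateWebhooksOnStartup on the workflow options. A small sketch of supplying those values through configuration follows; the "Workflow" section name is an assumption, only the key names are taken from the code above.

using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

// Illustrative startup configuration; the "Workflow" section name is assumed.
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["EFCORE_PERFORM_MIGRATIONS"] = "true",                // run EF Core migrations for relational providers
        ["Workflow:SynchronizeOnStartup"] = "false",           // skip the full sync on boot
        ["Workflow:LoadSchedulesOnStartup"] = "true",          // re-register update schedules
        ["Workflow:CreateOrUpdateWebhooksOnStartup"] = "true", // ensure Azure DevOps service hooks exist
    })
    .Build();

var performMigrations = configuration.GetValue<bool>("EFCORE_PERFORM_MIGRATIONS");
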
+ /// + [Required] + [JsonPropertyName("status")] + public string? Status { get; set; } + + /// + /// The title of the Pull Request. + /// + [Required] + [JsonPropertyName("title")] + public string? Title { get; set; } + + /// + /// The branch of the repository from which the changes are picked from in the Pull Request. + /// + /// refs/heads/feature/my-feature + [Required] + [JsonPropertyName("sourceRefName")] + public string? SourceRefName { get; set; } + + /// + /// The branch of the repository to which the merge shall be done. + /// + /// refs/heads/main + [Required] + [JsonPropertyName("targetRefName")] + public string? TargetRefName { get; set; } + + /// + /// The status of the merge. + /// + [Required] + [JsonPropertyName("mergeStatus")] + public string? MergeStatus { get; set; } + + /// + /// The identifier of the merge. + /// + [Required] + [JsonPropertyName("mergeId")] + public string? MergeId { get; set; } + + /// + /// The URL for the Pull Request. + /// + [Required] + [JsonPropertyName("url")] + public string? Url { get; set; } +} + +public class AzureDevOpsEventPullRequestCommentEventResource +{ + [Required] + [JsonPropertyName("comment")] + public AzureDevOpsEventCommentResource? Comment { get; set; } + + [Required] + [JsonPropertyName("pullRequest")] + public AzureDevOpsEventPullRequestResource? PullRequest { get; set; } +} + +public class AzureDevOpsEventCommentResource +{ + [Required] + [JsonPropertyName("id")] + public int? Id { get; set; } + + [JsonPropertyName("parentCommentId")] + public int? ParentCommentId { get; set; } + + [Required] + [JsonPropertyName("content")] + public string? Content { get; set; } + + [JsonPropertyName("commentType")] + public string? CommentType { get; set; } + + [Required] + [JsonPropertyName("publishedDate")] + public DateTimeOffset? PublishedDate { get; set; } +} + +public class AzureDevOpsEventRefUpdate +{ + [Required] + [JsonPropertyName("name")] + public string? Name { get; set; } + + [JsonPropertyName("oldObjectId")] + public string? OldObjectId { get; set; } + + [JsonPropertyName("newObjectId")] + public string? NewObjectId { get; set; } +} + +public class AzureDevOpsEventRepository +{ + /// + /// The unique identifier of the repository. + /// + [Required] + [JsonPropertyName("id")] + public string? Id { get; set; } + + /// + /// The name of the repository. + /// + [Required] + [JsonPropertyName("name")] + public string? Name { get; set; } + + /// + /// The details about the project which owns the repository. + /// + [Required] + [JsonPropertyName("project")] + public AzureDevOpsEventRepositoryProject? Project { get; set; } + + /// + /// The default branch of the repository. + /// + [JsonPropertyName("defaultBranch")] + public string? DefaultBranch { get; set; } // should not be required because some repositories do not have default branches + + [Required] + [JsonPropertyName("remoteUrl")] + public string? RemoteUrl { get; set; } +} + +public class AzureDevOpsEventRepositoryProject +{ + /// + /// The unique identifier of the project. + /// + [Required] + [JsonPropertyName("id")] + public string? Id { get; set; } + + /// + /// The name of the project. + /// + [Required] + [JsonPropertyName("name")] + public string? Name { get; set; } + + /// + /// The URL for the project. + /// + [Required] + [JsonPropertyName("url")] + public string? Url { get; set; } +} + +public enum AzureDevOpsEventType +{ + /// Code pushed + /// Code is pushed to a Git repository. 
+ [EnumMember(Value = "git.push")] + GitPush, + + /// Pull request updated + /// + /// Pull request is updated – status, review list, reviewer vote + /// changed or the source branch is updated with a push. + /// + [EnumMember(Value = "git.pullrequest.updated")] + GitPullRequestUpdated, + + /// Pull request merge attempted + /// Pull request - Branch merge attempted. + [EnumMember(Value = "git.pullrequest.merged")] + GitPullRequestMerged, + + /// Pull request commented on + /// Comments are added to a pull request. + [EnumMember(Value = "ms.vss-code.git-pullrequest-comment-event")] + GitPullRequestCommentEvent, +} diff --git a/server/Tingle.Dependabot/AzureDevOpsEventHandler.cs b/server/Tingle.Dependabot/AzureDevOpsEventHandler.cs new file mode 100644 index 00000000..b5897e04 --- /dev/null +++ b/server/Tingle.Dependabot/AzureDevOpsEventHandler.cs @@ -0,0 +1,96 @@ +using Microsoft.AspNetCore.Http.Json; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Options; +using System.Text.Json; +using Tingle.Dependabot.Events; +using Tingle.EventBus; + +namespace Tingle.Dependabot; + +internal class AzureDevOpsEventHandler +{ + private readonly IEventPublisher publisher; + private readonly JsonOptions jsonOptions; + private readonly ILogger logger; + + public AzureDevOpsEventHandler(IEventPublisher publisher, IOptions jsonOptions, ILogger logger) + { + this.publisher = publisher ?? throw new ArgumentNullException(nameof(publisher)); + this.jsonOptions = jsonOptions?.Value ?? throw new ArgumentNullException(nameof(jsonOptions)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public virtual async Task HandleAsync(AzureDevOpsEvent model, CancellationToken cancellationToken = default) + { + var type = model.EventType; + logger.LogInformation("Received {EventType} notification {NotificationId} on subscription {SubscriptionId}", + type, + model.NotificationId, + model.SubscriptionId); + + if (type is AzureDevOpsEventType.GitPush) + { + var resource = JsonSerializer.Deserialize(model.Resource, jsonOptions.SerializerOptions)!; + var adoRepository = resource.Repository!; + var adoRepositoryId = adoRepository.Id; + var defaultBranch = adoRepository.DefaultBranch; + + // if the updates are not the default branch, then we ignore them + var updatedReferences = resource.RefUpdates!.Select(ru => ru.Name).ToList(); + if (updatedReferences.Contains(defaultBranch, StringComparer.OrdinalIgnoreCase)) + { + // request synchronization of the repository + var evt = new ProcessSynchronization(true, repositoryProviderId: adoRepositoryId); + await publisher.PublishAsync(evt, cancellationToken: cancellationToken); + } + } + else if (type is AzureDevOpsEventType.GitPullRequestUpdated or AzureDevOpsEventType.GitPullRequestMerged) + { + var resource = JsonSerializer.Deserialize(model.Resource, jsonOptions.SerializerOptions)!; + var adoRepository = resource.Repository!; + var prId = resource.PullRequestId; + var status = resource.Status; + + if (type is AzureDevOpsEventType.GitPullRequestUpdated) + { + logger.LogInformation("PR {PullRequestId} in {RepositoryUrl} status updated to {PullRequestStatus}", + prId, + adoRepository.RemoteUrl, + status); + + // TODO: handle the logic for merge conflicts here using events + + } + else if (type is AzureDevOpsEventType.GitPullRequestMerged) + { + logger.LogInformation("Merge status {MergeStatus} for PR {PullRequestId} in {RepositoryUrl}", + resource.MergeStatus, + prId, + adoRepository.RemoteUrl); + + // TODO: handle the logic for updating 
other PRs to find merge conflicts (restart merge or attempt merge) + + } + } + else if (type is AzureDevOpsEventType.GitPullRequestCommentEvent) + { + var resource = JsonSerializer.Deserialize(model.Resource, jsonOptions.SerializerOptions)!; + var comment = resource.Comment!; + var pr = resource.PullRequest!; + var adoRepository = pr.Repository!; + var prId = pr.PullRequestId; + var status = pr.Status; + + logger.LogInformation("PR {PullRequestId} in {RepositoryUrl} was commented on: {Comment}", + prId, + adoRepository.RemoteUrl, + comment); + + // TODO: handle the logic for comments here using events + } + else + { + logger.LogWarning("'{EventType}' events are not supported!", type); + } + } +} diff --git a/server/Tingle.Dependabot/BasicUserValidationService.cs b/server/Tingle.Dependabot/BasicUserValidationService.cs new file mode 100644 index 00000000..ca50b7ad --- /dev/null +++ b/server/Tingle.Dependabot/BasicUserValidationService.cs @@ -0,0 +1,19 @@ +using AspNetCore.Authentication.Basic; + +namespace Tingle.Dependabot; + +internal class BasicUserValidationService : IBasicUserValidationService +{ + private readonly IConfiguration configuration; + + public BasicUserValidationService(IConfiguration configuration) + { + this.configuration = configuration ?? throw new ArgumentNullException(nameof(configuration)); + } + + public Task IsValidAsync(string username, string password) + { + var expected = configuration.GetValue($"Authentication:Schemes:ServiceHooks:Credentials:{username}"); + return Task.FromResult(string.Equals(expected, password, StringComparison.Ordinal)); + } +} diff --git a/server/Tingle.Dependabot/CollectionExtensions.cs b/server/Tingle.Dependabot/CollectionExtensions.cs new file mode 100644 index 00000000..7c6e6535 --- /dev/null +++ b/server/Tingle.Dependabot/CollectionExtensions.cs @@ -0,0 +1,27 @@ +namespace System.Collections.Generic; + +internal static class CollectionExtensions +{ + /// + /// Adds an element with the provided key and value, + /// provided the value is not equal to the type's default value (or empty for strings). + /// + /// The type of keys in the dictionary. + /// The type of values in the dictionary. + /// The dictionary to use + /// The object to use as the key of the element to add. + /// The object to use as the value of the element to add. + /// key is null. + /// The dictionary is read-only. + /// + public static IDictionary AddIfNotDefault(this IDictionary dictionary, TKey key, TValue? value) + where TKey : notnull + { + if (value is not null || value is string s && !string.IsNullOrWhiteSpace(s)) + { + dictionary[key] = value; + } + + return dictionary; + } +} diff --git a/server/Tingle.Dependabot/Consumers/ProcessSynchronizationConsumer.cs b/server/Tingle.Dependabot/Consumers/ProcessSynchronizationConsumer.cs new file mode 100644 index 00000000..4676e446 --- /dev/null +++ b/server/Tingle.Dependabot/Consumers/ProcessSynchronizationConsumer.cs @@ -0,0 +1,50 @@ +using Microsoft.EntityFrameworkCore; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; +using Tingle.EventBus; + +namespace Tingle.Dependabot.Consumers; + +internal class ProcessSynchronizationConsumer : IEventConsumer +{ + private readonly MainDbContext dbContext; + private readonly Synchronizer synchronizer; + private readonly ILogger logger; + + public ProcessSynchronizationConsumer(MainDbContext dbContext, Synchronizer synchronizer, ILogger logger) + { + this.dbContext = dbContext ?? 
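
AddIfNotDefault's XML docs say the value is skipped when it is null or, for strings, empty; in the body, because && binds tighter than ||, the condition `value is not null || value is string s && !string.IsNullOrWhiteSpace(s)` still admits whitespace-only strings. The sketch below shows a condition that matches the documented intent; it is offered as a hedged alternative, not as the project's final behavior.

using System.Collections.Generic;

internal static class CollectionExtensionsSketch
{
    // Adds the entry only when the value is meaningful: non-null, and for strings also non-blank.
    public static IDictionary<TKey, TValue> AddIfNotDefault<TKey, TValue>(this IDictionary<TKey, TValue> dictionary, TKey key, TValue? value)
        where TKey : notnull
    {
        if (value is not null && (value is not string s || !string.IsNullOrWhiteSpace(s)))
        {
            dictionary[key] = value;
        }

        return dictionary;
    }
}
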
throw new ArgumentNullException(nameof(dbContext)); + this.synchronizer = synchronizer ?? throw new ArgumentNullException(nameof(synchronizer)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task ConsumeAsync(EventContext context, CancellationToken cancellationToken = default) + { + var evt = context.Event; + + var trigger = evt.Trigger; + + if (evt.RepositoryId is not null) + { + // ensure repository exists + var repositoryId = evt.RepositoryId ?? throw new InvalidOperationException($"'{nameof(evt.RepositoryId)}' cannot be null"); + var repository = await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == repositoryId, cancellationToken); + if (repository is null) + { + logger.LogWarning("Skipping synchronization because repository '{Repository}' does not exist.", repositoryId); + return; + } + + await synchronizer.SynchronizeAsync(repository, trigger, cancellationToken); + } + else if (evt.RepositoryProviderId is not null) + { + await synchronizer.SynchronizeAsync(repositoryProviderId: evt.RepositoryProviderId, trigger, cancellationToken); + } + else + { + await synchronizer.SynchronizeAsync(evt.Trigger, cancellationToken); + } + } +} diff --git a/server/Tingle.Dependabot/Consumers/RepositoryEventsConsumer.cs b/server/Tingle.Dependabot/Consumers/RepositoryEventsConsumer.cs new file mode 100644 index 00000000..d5caf706 --- /dev/null +++ b/server/Tingle.Dependabot/Consumers/RepositoryEventsConsumer.cs @@ -0,0 +1,49 @@ +using Microsoft.EntityFrameworkCore; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; +using Tingle.EventBus; + +namespace Tingle.Dependabot.Consumers; + +internal class RepositoryEventsConsumer : IEventConsumer, IEventConsumer, IEventConsumer +{ + private readonly MainDbContext dbContext; + private readonly UpdateScheduler scheduler; + + public RepositoryEventsConsumer(MainDbContext dbContext, UpdateScheduler scheduler) + { + this.dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext)); + this.scheduler = scheduler ?? throw new ArgumentNullException(nameof(scheduler)); + } + + public async Task ConsumeAsync(EventContext context, CancellationToken cancellationToken) + { + var evt = context.Event; + + // update scheduler + var repositoryId = evt.RepositoryId ?? throw new InvalidOperationException($"'{nameof(evt.RepositoryId)}' cannot be null"); + var repository = await dbContext.Repositories.SingleAsync(r => r.Id == repositoryId, cancellationToken); + await scheduler.CreateOrUpdateAsync(repository, cancellationToken); + } + + public async Task ConsumeAsync(EventContext context, CancellationToken cancellationToken) + { + var evt = context.Event; + + // update scheduler + var repositoryId = evt.RepositoryId ?? throw new InvalidOperationException($"'{nameof(evt.RepositoryId)}' cannot be null"); + var repository = await dbContext.Repositories.SingleAsync(r => r.Id == repositoryId, cancellationToken); + await scheduler.CreateOrUpdateAsync(repository, cancellationToken); + } + + public async Task ConsumeAsync(EventContext context, CancellationToken cancellationToken) + { + var evt = context.Event; + + // remove from scheduler + var repositoryId = evt.RepositoryId ?? 
throw new InvalidOperationException($"'{nameof(evt.RepositoryId)}' cannot be null"); + var repository = await dbContext.Repositories.SingleAsync(r => r.Id == repositoryId, cancellationToken); + await scheduler.RemoveAsync(repositoryId, cancellationToken); + } +} diff --git a/server/Tingle.Dependabot/Consumers/TriggerUpdateJobsEventConsumer.cs b/server/Tingle.Dependabot/Consumers/TriggerUpdateJobsEventConsumer.cs new file mode 100644 index 00000000..56e87d22 --- /dev/null +++ b/server/Tingle.Dependabot/Consumers/TriggerUpdateJobsEventConsumer.cs @@ -0,0 +1,116 @@ +using Microsoft.EntityFrameworkCore; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; +using Tingle.EventBus; +using Tingle.Extensions; + +namespace Tingle.Dependabot.Consumers; + +internal class TriggerUpdateJobsEventConsumer : IEventConsumer +{ + private readonly MainDbContext dbContext; + private readonly UpdateRunner updateRunner; + private readonly ILogger logger; + + public TriggerUpdateJobsEventConsumer(MainDbContext dbContext, UpdateRunner updateRunner, ILogger logger) + { + this.dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext)); + this.updateRunner = updateRunner ?? throw new ArgumentNullException(nameof(updateRunner)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task ConsumeAsync(EventContext context, CancellationToken cancellationToken) + { + var evt = context.Event; + + // ensure repository exists + var repositoryId = evt.RepositoryId ?? throw new InvalidOperationException($"'{nameof(evt.RepositoryId)}' cannot be null"); + var repository = await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == repositoryId, cancellationToken); + if (repository is null) + { + logger.LogWarning("Skipping trigger for update because repository '{Repository}' does not exist.", repositoryId); + return; + } + + // if we have a specific update to trigger, use it otherwise, do all + IList? updates = null; + var repositoryUpdateId = evt.RepositoryUpdateId; + if (repositoryUpdateId is not null) + { + var update = repository.Updates.ElementAtOrDefault(repositoryUpdateId.Value); + if (update is null) + { + logger.LogWarning("Skipping trigger for update because repository update '{RepositoryUpdateId}' does not exist.", repositoryUpdateId); + return; + } + updates = new[] { update, }; + } + else + { + updates = repository.Updates.ToList(); + } + + // trigger each update + var eventBusId = context.Id; + foreach (var update in updates) + { + // check if there is an existing one + var job = await dbContext.UpdateJobs.SingleOrDefaultAsync(j => j.PackageEcosystem == update.PackageEcosystem && j.Directory == update.Directory && j.EventBusId == eventBusId, cancellationToken); + if (job is not null) + { + logger.LogWarning("A job for update '{RepositoryId}({UpdateId})' requested by event '{EventBusId}' already exists. 
Skipping it ...", + repository.Id, + repository.Updates.IndexOf(update), + eventBusId); + } + else + { + // decide the resources based on the ecosystem + var ecosystem = update.PackageEcosystem!.Value; + var resources = UpdateJobResources.FromEcosystem(ecosystem); + + // create the job + job = new UpdateJob + { + Id = SequenceNumber.Generate().ToString(), + + Created = DateTimeOffset.UtcNow, + Status = UpdateJobStatus.Scheduled, + Trigger = evt.Trigger, + + RepositoryId = repository.Id, + RepositorySlug = repository.Slug, + EventBusId = eventBusId, + + Commit = repository.LatestCommit, + PackageEcosystem = ecosystem, + Directory = update.Directory, + Resources = resources, + AuthKey = Keygen.Create(25), + + Start = null, + End = null, + Duration = null, + Log = null, + }; + await dbContext.UpdateJobs.AddAsync(job, cancellationToken); + + // update the RepositoryUpdate + update.LatestJobId = job.Id; + update.LatestJobStatus = job.Status; + update.LatestUpdate = job.Created; + + // save to the database + await dbContext.SaveChangesAsync(cancellationToken); + } + + // call the update runner to run the update + await updateRunner.CreateAsync(repository, update, job, cancellationToken); + + // save changes that may have been made by the updateRunner + update.LatestJobStatus = job.Status; + await dbContext.SaveChangesAsync(cancellationToken); + } + } +} diff --git a/server/Tingle.Dependabot/Consumers/UpdateJobEventsConsumer.cs b/server/Tingle.Dependabot/Consumers/UpdateJobEventsConsumer.cs new file mode 100644 index 00000000..68ce9bad --- /dev/null +++ b/server/Tingle.Dependabot/Consumers/UpdateJobEventsConsumer.cs @@ -0,0 +1,129 @@ +using Microsoft.EntityFrameworkCore; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; +using Tingle.EventBus; + +namespace Tingle.Dependabot.Consumers; + +internal class UpdateJobEventsConsumer : IEventConsumer, IEventConsumer +{ + private readonly MainDbContext dbContext; + private readonly UpdateRunner updateRunner; + private readonly ILogger logger; + + public UpdateJobEventsConsumer(MainDbContext dbContext, UpdateRunner updateRunner, ILogger logger) + { + this.dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext)); + this.updateRunner = updateRunner ?? throw new ArgumentNullException(nameof(updateRunner)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task ConsumeAsync(EventContext context, CancellationToken cancellationToken) + { + var evt = context.Event; + + // find the job + var jobId = evt.JobId; + var job = await dbContext.UpdateJobs.SingleOrDefaultAsync(j => j.Id == jobId, cancellationToken); + if (job is null) + { + logger.LogWarning("Cannot update state for job '{UpdateJobId}' as it does not exist.", jobId); + return; + } + + // skip jobs in a terminal state (useful when reprocessed events) + if (job.Status is UpdateJobStatus.Succeeded or UpdateJobStatus.Failed) + { + logger.LogWarning("Cannot update state for job '{UpdateJobId}' as it is already in a terminal state.", jobId); + return; + } + + // get the state from the runner + var state = await updateRunner.GetStateAsync(job, cancellationToken); + if (state is null) + { + logger.LogInformation("The runner did not provide a state for job '{UpdateJobId}'.", jobId); + + // delete the job if we have been waiting for over 90 minutes and still do not have state + var diff = DateTimeOffset.UtcNow - job.Created; + if (diff > TimeSpan.FromMinutes(90)) + { + logger.LogWarning("Deleting job '{UpdateJobId}' as it has been pending for more than 90 minutes.", jobId); + + // delete the run + await updateRunner.DeleteAsync(job, cancellationToken); + + // delete from the database + dbContext.UpdateJobs.Remove(job); + await dbContext.SaveChangesAsync(cancellationToken); + } + + return; + } + + // calculate duration + var (status, start, end) = state.Value; + TimeSpan? duration = null; + if (start is not null && end is not null) + { + var diff = end.Value - start.Value; + duration = diff; + } + + // update the job + job.Status = status; + job.Start = start; + job.End = end; + job.Duration = duration is null ? 
null : Convert.ToInt64(Math.Ceiling(duration.Value.TotalMilliseconds)); + + // update the Repository with status of the latest job for the update, if it exists + var repository = await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == job.RepositoryId, cancellationToken); + if (repository is not null) + { + var update = repository.Updates.SingleOrDefault(u => u.PackageEcosystem == job.PackageEcosystem && u.Directory == job.Directory); + if (update is not null && update.LatestJobId == job.Id) + { + update.LatestJobStatus = job.Status; + } + } + + // save to the database + await dbContext.SaveChangesAsync(cancellationToken); + + // logs are sometimes not available immediately, we usually need at least 2 minutes after completion time + // we publish an event in the future to pull the logs then delete the run + var scheduleTime = end?.AddMinutes(2.5f); // extra half-minute for buffer + if (scheduleTime < DateTimeOffset.UtcNow) scheduleTime = null; // no need to schedule in the past + await context.PublishAsync(new UpdateJobCollectLogsEvent { JobId = job.Id, }, scheduleTime, cancellationToken); + } + + public async Task ConsumeAsync(EventContext context, CancellationToken cancellationToken) + { + var evt = context.Event; + + // find the job + var jobId = evt.JobId; + var job = await dbContext.UpdateJobs.SingleOrDefaultAsync(j => j.Id == jobId, cancellationToken); + if (job is null) + { + logger.LogWarning("Cannot collect logs for job '{UpdateJobId}' as it does not exist.", jobId); + return; + } + + // ensure the job succeeded or failed + if (job.Status is not UpdateJobStatus.Succeeded and not UpdateJobStatus.Failed) + { + logger.LogWarning("Cannot collect logs for job '{UpdateJobId}' with status '{UpdateJobStatus}'.", job.Id, job.Status); + return; + } + + // pull the log and update the database + job.Log = await updateRunner.GetLogsAsync(job, cancellationToken); + if (string.IsNullOrWhiteSpace(job.Log)) job.Log = null; // reduces allocations later and unnecessary serialization + await dbContext.SaveChangesAsync(cancellationToken); + + // delete the run + await updateRunner.DeleteAsync(job, cancellationToken); + } +} diff --git a/server/Tingle.Dependabot/Dockerfile b/server/Tingle.Dependabot/Dockerfile new file mode 100644 index 00000000..1001f323 --- /dev/null +++ b/server/Tingle.Dependabot/Dockerfile @@ -0,0 +1,22 @@ +#See https://aka.ms/containerfastmode to understand how Visual Studio uses this Dockerfile to build your images for faster debugging. + +FROM mcr.microsoft.com/dotnet/aspnet:7.0-bullseye-slim AS base +WORKDIR /app +EXPOSE 80 +EXPOSE 443 + +FROM mcr.microsoft.com/dotnet/sdk:7.0-bullseye-slim AS build +WORKDIR /src +COPY ["Tingle.Dependabot/Tingle.Dependabot.csproj", "Tingle.Dependabot/"] +RUN dotnet restore "Tingle.Dependabot/Tingle.Dependabot.csproj" +COPY . . +WORKDIR "/src/Tingle.Dependabot" +RUN dotnet build "Tingle.Dependabot.csproj" -c Release -o /app/build + +FROM build AS publish +RUN dotnet publish "Tingle.Dependabot.csproj" -c Release -o /app/publish + +FROM base AS final +WORKDIR /app +COPY --from=publish /app/publish . +ENTRYPOINT ["dotnet", "Tingle.Dependabot.dll"] \ No newline at end of file diff --git a/server/Tingle.Dependabot/Dockerfile.CI b/server/Tingle.Dependabot/Dockerfile.CI new file mode 100644 index 00000000..0e9834be --- /dev/null +++ b/server/Tingle.Dependabot/Dockerfile.CI @@ -0,0 +1,12 @@ +# There are a number of reasons as to why this docker file is not used for restore, build, test and publish +# 1. 
The project requires nuget packages that require authentication which would make it complex +# 2. The solution is made up of multiple projects which makes copying of files complex +# +# As a result, we only copy publish output and put it in the container + +FROM mcr.microsoft.com/dotnet/aspnet:7.0-bullseye-slim AS base +EXPOSE 80 +EXPOSE 443 +WORKDIR /app +COPY . . +ENTRYPOINT ["dotnet", "Tingle.Dependabot.dll"] \ No newline at end of file diff --git a/server/Tingle.Dependabot/Events/ProcessSynchronization.cs b/server/Tingle.Dependabot/Events/ProcessSynchronization.cs new file mode 100644 index 00000000..001ffd6c --- /dev/null +++ b/server/Tingle.Dependabot/Events/ProcessSynchronization.cs @@ -0,0 +1,30 @@ +namespace Tingle.Dependabot.Events; + +public record ProcessSynchronization +{ + public ProcessSynchronization() { } // required for deserialization + + public ProcessSynchronization(bool trigger, string? repositoryId = null, string? repositoryProviderId = null) + { + Trigger = trigger; + RepositoryId = repositoryId; + RepositoryProviderId = repositoryProviderId; + } + + /// + /// Indicates whether we should trigger the update jobs where changes have been detected. + /// + public bool Trigger { get; set; } + + /// + /// Identifier of the repository. + /// Required if is not supplied. + /// + public string? RepositoryId { get; set; } + + /// + /// Identifier of the repository as given by the provider. + /// Required if is not supplied. + /// + public string? RepositoryProviderId { get; set; } +} diff --git a/server/Tingle.Dependabot/Events/RepositoryCreatedEvent.cs b/server/Tingle.Dependabot/Events/RepositoryCreatedEvent.cs new file mode 100644 index 00000000..e98350a7 --- /dev/null +++ b/server/Tingle.Dependabot/Events/RepositoryCreatedEvent.cs @@ -0,0 +1,27 @@ +using Tingle.Dependabot.Models; + +namespace Tingle.Dependabot.Events; + +public record RepositoryCreatedEvent : AbstractRepositoryEvent { } + +public record RepositoryUpdatedEvent : AbstractRepositoryEvent { } + +public record RepositoryDeletedEvent : AbstractRepositoryEvent { } + +public record TriggerUpdateJobsEvent : AbstractRepositoryEvent +{ + /// + /// Optional identifier of the repository update. + /// When all updates in the repository are scheduled to run. + /// + public int? RepositoryUpdateId { get; set; } + + /// The trigger. + public required UpdateJobTrigger Trigger { get; set; } +} + +public abstract record AbstractRepositoryEvent +{ + /// Identifier of the repository. + public required string? RepositoryId { get; set; } +} diff --git a/server/Tingle.Dependabot/Events/UpdateJobCheckStateEvent.cs b/server/Tingle.Dependabot/Events/UpdateJobCheckStateEvent.cs new file mode 100644 index 00000000..7a73f078 --- /dev/null +++ b/server/Tingle.Dependabot/Events/UpdateJobCheckStateEvent.cs @@ -0,0 +1,11 @@ +namespace Tingle.Dependabot.Events; + +public record UpdateJobCheckStateEvent : AbstractUpdateJobEvent { } + +public record UpdateJobCollectLogsEvent : AbstractUpdateJobEvent { } + +public abstract record AbstractUpdateJobEvent +{ + /// Identifier of the job. + public required string? 
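
Tying the pieces together, publishing a TriggerUpdateJobsEvent is what asks TriggerUpdateJobsEventConsumer (above) to create and run update jobs. The sketch below mirrors the publish call shape used elsewhere in this change; the repository id is the test sample value, and MissedSchedule stands in only because it is the sole UpdateJobTrigger member visible in this diff.

using System.Threading;
using System.Threading.Tasks;
using Tingle.Dependabot.Events;
using Tingle.Dependabot.Models;
using Tingle.EventBus;

internal static class TriggerUpdateSketch
{
    // Illustrative only: request that update jobs be created and run for one repository update.
    // RepositoryUpdateId is the position of the update in Repository.Updates; leave it null to run all updates.
    public static async Task RequestUpdateAsync(IEventPublisher publisher, CancellationToken cancellationToken = default)
    {
        var evt = new TriggerUpdateJobsEvent
        {
            RepositoryId = "repo_1234567890",
            RepositoryUpdateId = 0,
            Trigger = UpdateJobTrigger.MissedSchedule,
        };
        await publisher.PublishAsync(evt, cancellationToken: cancellationToken);
    }
}
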
JobId { get; set; } +} diff --git a/server/Tingle.Dependabot/Migrations/20230224045948_InitialCreate.Designer.cs b/server/Tingle.Dependabot/Migrations/20230224045948_InitialCreate.Designer.cs new file mode 100644 index 00000000..e59e76e0 --- /dev/null +++ b/server/Tingle.Dependabot/Migrations/20230224045948_InitialCreate.Designer.cs @@ -0,0 +1,274 @@ +// +using System; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Metadata; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using Tingle.Dependabot.Models; + +#nullable disable + +namespace Tingle.Dependabot.Migrations +{ + [DbContext(typeof(MainDbContext))] + [Migration("20230224045948_InitialCreate")] + partial class InitialCreate + { + /// + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "7.0.3") + .HasAnnotation("Relational:MaxIdentifierLength", 128); + + SqlServerModelBuilderExtensions.UseIdentityColumns(modelBuilder); + + modelBuilder.Entity("Microsoft.AspNetCore.DataProtection.EntityFrameworkCore.DataProtectionKey", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("int"); + + SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("Id")); + + b.Property("FriendlyName") + .HasColumnType("nvarchar(max)"); + + b.Property("Xml") + .HasColumnType("nvarchar(max)"); + + b.HasKey("Id"); + + b.ToTable("DataProtectionKeys"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.Repository", b => + { + b.Property("Id") + .HasMaxLength(50) + .HasColumnType("nvarchar(50)"); + + b.Property("ConfigFileContents") + .IsRequired() + .HasColumnType("nvarchar(max)"); + + b.Property("Created") + .HasColumnType("datetimeoffset"); + + b.Property("Etag") + .IsConcurrencyToken() + .ValueGeneratedOnAddOrUpdate() + .HasColumnType("rowversion"); + + b.Property("LatestCommit") + .HasMaxLength(200) + .HasColumnType("nvarchar(200)"); + + b.Property("Name") + .HasColumnType("nvarchar(max)"); + + b.Property("ProviderId") + .HasColumnType("nvarchar(450)"); + + b.Property("Slug") + .HasColumnType("nvarchar(max)"); + + b.Property("SyncException") + .HasColumnType("nvarchar(max)"); + + b.Property("Updated") + .HasColumnType("datetimeoffset"); + + b.Property("Updates") + .IsRequired() + .HasColumnType("nvarchar(max)"); + + b.HasKey("Id"); + + b.HasIndex("Created") + .IsDescending(); + + b.HasIndex("ProviderId") + .IsUnique() + .HasFilter("[ProviderId] IS NOT NULL"); + + b.ToTable("Repositories"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.UpdateJob", b => + { + b.Property("Id") + .HasMaxLength(50) + .HasColumnType("nvarchar(50)"); + + b.Property("AuthKey") + .IsRequired() + .HasColumnType("nvarchar(450)"); + + b.Property("Commit") + .HasMaxLength(50) + .HasColumnType("nvarchar(50)"); + + b.Property("Created") + .HasColumnType("datetimeoffset"); + + b.Property("Directory") + .IsRequired() + .HasColumnType("nvarchar(450)"); + + b.Property("Duration") + .HasColumnType("bigint"); + + b.Property("End") + .HasColumnType("datetimeoffset"); + + b.Property("Etag") + .IsConcurrencyToken() + .ValueGeneratedOnAddOrUpdate() + .HasColumnType("rowversion"); + + b.Property("EventBusId") + .HasColumnType("nvarchar(450)"); + + b.Property("Log") + .HasColumnType("nvarchar(max)"); + + b.Property("PackageEcosystem") + .HasColumnType("int"); + + b.Property("RepositoryId") + .IsRequired() + 
.HasColumnType("nvarchar(450)"); + + b.Property("RepositorySlug") + .IsRequired() + .HasColumnType("nvarchar(max)"); + + b.Property("Start") + .HasColumnType("datetimeoffset"); + + b.Property("Status") + .HasColumnType("int"); + + b.Property("Trigger") + .HasColumnType("int"); + + b.HasKey("Id"); + + b.HasIndex("AuthKey") + .IsUnique(); + + b.HasIndex("Created") + .IsDescending(); + + b.HasIndex("RepositoryId"); + + b.HasIndex("PackageEcosystem", "Directory"); + + b.HasIndex("PackageEcosystem", "Directory", "EventBusId") + .IsUnique() + .HasFilter("[EventBusId] IS NOT NULL"); + + b.ToTable("UpdateJobs"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.Repository", b => + { + b.OwnsMany("Tingle.Dependabot.Models.DependabotRegistry", "Registries", b1 => + { + b1.Property("RepositoryId") + .HasColumnType("nvarchar(50)"); + + b1.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("int"); + + b1.Property("AuthKey") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "auth-key"); + + b1.Property("Key") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "key"); + + b1.Property("Organization") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "organization"); + + b1.Property("Password") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "password"); + + b1.Property("PublicKeyFingerprint") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "public-key-fingerprint"); + + b1.Property("ReplacesBase") + .HasColumnType("bit") + .HasAnnotation("Relational:JsonPropertyName", "replaces-base"); + + b1.Property("Repo") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "repo"); + + b1.Property("Token") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "token"); + + b1.Property("Type") + .IsRequired() + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "type"); + + b1.Property("Url") + .IsRequired() + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "url"); + + b1.Property("Username") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "username"); + + b1.HasKey("RepositoryId", "Id"); + + b1.ToTable("Repositories"); + + b1.ToJson("Registries"); + + b1.WithOwner() + .HasForeignKey("RepositoryId"); + }); + + b.Navigation("Registries"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.UpdateJob", b => + { + b.OwnsOne("Tingle.Dependabot.Models.UpdateJobResources", "Resources", b1 => + { + b1.Property("UpdateJobId") + .HasColumnType("nvarchar(50)"); + + b1.Property("Cpu") + .HasColumnType("float"); + + b1.Property("Memory") + .HasColumnType("float"); + + b1.HasKey("UpdateJobId"); + + b1.ToTable("UpdateJobs"); + + b1.WithOwner() + .HasForeignKey("UpdateJobId"); + }); + + b.Navigation("Resources") + .IsRequired(); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/server/Tingle.Dependabot/Migrations/20230224045948_InitialCreate.cs b/server/Tingle.Dependabot/Migrations/20230224045948_InitialCreate.cs new file mode 100644 index 00000000..c53b23a6 --- /dev/null +++ b/server/Tingle.Dependabot/Migrations/20230224045948_InitialCreate.cs @@ -0,0 +1,132 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace Tingle.Dependabot.Migrations; + +/// +public partial class InitialCreate : Migration +{ + /// + protected override void Up(MigrationBuilder 
migrationBuilder) + { + migrationBuilder.CreateTable( + name: "DataProtectionKeys", + columns: table => new + { + Id = table.Column(type: "int", nullable: false) + .Annotation("SqlServer:Identity", "1, 1"), + FriendlyName = table.Column(type: "nvarchar(max)", nullable: true), + Xml = table.Column(type: "nvarchar(max)", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_DataProtectionKeys", x => x.Id); + }); + + migrationBuilder.CreateTable( + name: "Repositories", + columns: table => new + { + Id = table.Column(type: "nvarchar(50)", maxLength: 50, nullable: false), + Created = table.Column(type: "datetimeoffset", nullable: false), + Updated = table.Column(type: "datetimeoffset", nullable: false), + Name = table.Column(type: "nvarchar(max)", nullable: true), + Slug = table.Column(type: "nvarchar(max)", nullable: true), + ProviderId = table.Column(type: "nvarchar(450)", nullable: true), + LatestCommit = table.Column(type: "nvarchar(200)", maxLength: 200, nullable: true), + ConfigFileContents = table.Column(type: "nvarchar(max)", nullable: false), + SyncException = table.Column(type: "nvarchar(max)", nullable: true), + Updates = table.Column(type: "nvarchar(max)", nullable: false), + Etag = table.Column(type: "rowversion", rowVersion: true, nullable: true), + Registries = table.Column(type: "nvarchar(max)", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_Repositories", x => x.Id); + }); + + migrationBuilder.CreateTable( + name: "UpdateJobs", + columns: table => new + { + Id = table.Column(type: "nvarchar(50)", maxLength: 50, nullable: false), + Created = table.Column(type: "datetimeoffset", nullable: false), + Status = table.Column(type: "int", nullable: false), + Trigger = table.Column(type: "int", nullable: false), + RepositoryId = table.Column(type: "nvarchar(450)", nullable: false), + RepositorySlug = table.Column(type: "nvarchar(max)", nullable: false), + EventBusId = table.Column(type: "nvarchar(450)", nullable: true), + Commit = table.Column(type: "nvarchar(50)", maxLength: 50, nullable: true), + PackageEcosystem = table.Column(type: "int", nullable: false), + Directory = table.Column(type: "nvarchar(450)", nullable: false), + Resources_Cpu = table.Column(type: "float", nullable: false), + Resources_Memory = table.Column(type: "float", nullable: false), + AuthKey = table.Column(type: "nvarchar(450)", nullable: false), + Start = table.Column(type: "datetimeoffset", nullable: true), + End = table.Column(type: "datetimeoffset", nullable: true), + Duration = table.Column(type: "bigint", nullable: true), + Log = table.Column(type: "nvarchar(max)", nullable: true), + Etag = table.Column(type: "rowversion", rowVersion: true, nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_UpdateJobs", x => x.Id); + }); + + migrationBuilder.CreateIndex( + name: "IX_Repositories_Created", + table: "Repositories", + column: "Created", + descending: new bool[0]); + + migrationBuilder.CreateIndex( + name: "IX_Repositories_ProviderId", + table: "Repositories", + column: "ProviderId", + unique: true, + filter: "[ProviderId] IS NOT NULL"); + + migrationBuilder.CreateIndex( + name: "IX_UpdateJobs_AuthKey", + table: "UpdateJobs", + column: "AuthKey", + unique: true); + + migrationBuilder.CreateIndex( + name: "IX_UpdateJobs_Created", + table: "UpdateJobs", + column: "Created", + descending: new bool[0]); + + migrationBuilder.CreateIndex( + name: "IX_UpdateJobs_PackageEcosystem_Directory", + table: "UpdateJobs", + columns: new[] { "PackageEcosystem", 
"Directory" }); + + migrationBuilder.CreateIndex( + name: "IX_UpdateJobs_PackageEcosystem_Directory_EventBusId", + table: "UpdateJobs", + columns: new[] { "PackageEcosystem", "Directory", "EventBusId" }, + unique: true, + filter: "[EventBusId] IS NOT NULL"); + + migrationBuilder.CreateIndex( + name: "IX_UpdateJobs_RepositoryId", + table: "UpdateJobs", + column: "RepositoryId"); + } + + /// + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropTable( + name: "DataProtectionKeys"); + + migrationBuilder.DropTable( + name: "Repositories"); + + migrationBuilder.DropTable( + name: "UpdateJobs"); + } +} diff --git a/server/Tingle.Dependabot/Migrations/MainDbContextModelSnapshot.cs b/server/Tingle.Dependabot/Migrations/MainDbContextModelSnapshot.cs new file mode 100644 index 00000000..38fee502 --- /dev/null +++ b/server/Tingle.Dependabot/Migrations/MainDbContextModelSnapshot.cs @@ -0,0 +1,271 @@ +// +using System; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Metadata; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using Tingle.Dependabot.Models; + +#nullable disable + +namespace Tingle.Dependabot.Migrations +{ + [DbContext(typeof(MainDbContext))] + partial class MainDbContextModelSnapshot : ModelSnapshot + { + protected override void BuildModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "7.0.3") + .HasAnnotation("Relational:MaxIdentifierLength", 128); + + SqlServerModelBuilderExtensions.UseIdentityColumns(modelBuilder); + + modelBuilder.Entity("Microsoft.AspNetCore.DataProtection.EntityFrameworkCore.DataProtectionKey", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("int"); + + SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("Id")); + + b.Property("FriendlyName") + .HasColumnType("nvarchar(max)"); + + b.Property("Xml") + .HasColumnType("nvarchar(max)"); + + b.HasKey("Id"); + + b.ToTable("DataProtectionKeys"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.Repository", b => + { + b.Property("Id") + .HasMaxLength(50) + .HasColumnType("nvarchar(50)"); + + b.Property("ConfigFileContents") + .IsRequired() + .HasColumnType("nvarchar(max)"); + + b.Property("Created") + .HasColumnType("datetimeoffset"); + + b.Property("Etag") + .IsConcurrencyToken() + .ValueGeneratedOnAddOrUpdate() + .HasColumnType("rowversion"); + + b.Property("LatestCommit") + .HasMaxLength(200) + .HasColumnType("nvarchar(200)"); + + b.Property("Name") + .HasColumnType("nvarchar(max)"); + + b.Property("ProviderId") + .HasColumnType("nvarchar(450)"); + + b.Property("Slug") + .HasColumnType("nvarchar(max)"); + + b.Property("SyncException") + .HasColumnType("nvarchar(max)"); + + b.Property("Updated") + .HasColumnType("datetimeoffset"); + + b.Property("Updates") + .IsRequired() + .HasColumnType("nvarchar(max)"); + + b.HasKey("Id"); + + b.HasIndex("Created") + .IsDescending(); + + b.HasIndex("ProviderId") + .IsUnique() + .HasFilter("[ProviderId] IS NOT NULL"); + + b.ToTable("Repositories"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.UpdateJob", b => + { + b.Property("Id") + .HasMaxLength(50) + .HasColumnType("nvarchar(50)"); + + b.Property("AuthKey") + .IsRequired() + .HasColumnType("nvarchar(450)"); + + b.Property("Commit") + .HasMaxLength(50) + .HasColumnType("nvarchar(50)"); + + b.Property("Created") + .HasColumnType("datetimeoffset"); + + b.Property("Directory") + 
.IsRequired() + .HasColumnType("nvarchar(450)"); + + b.Property("Duration") + .HasColumnType("bigint"); + + b.Property("End") + .HasColumnType("datetimeoffset"); + + b.Property("Etag") + .IsConcurrencyToken() + .ValueGeneratedOnAddOrUpdate() + .HasColumnType("rowversion"); + + b.Property("EventBusId") + .HasColumnType("nvarchar(450)"); + + b.Property("Log") + .HasColumnType("nvarchar(max)"); + + b.Property("PackageEcosystem") + .HasColumnType("int"); + + b.Property("RepositoryId") + .IsRequired() + .HasColumnType("nvarchar(450)"); + + b.Property("RepositorySlug") + .IsRequired() + .HasColumnType("nvarchar(max)"); + + b.Property("Start") + .HasColumnType("datetimeoffset"); + + b.Property("Status") + .HasColumnType("int"); + + b.Property("Trigger") + .HasColumnType("int"); + + b.HasKey("Id"); + + b.HasIndex("AuthKey") + .IsUnique(); + + b.HasIndex("Created") + .IsDescending(); + + b.HasIndex("RepositoryId"); + + b.HasIndex("PackageEcosystem", "Directory"); + + b.HasIndex("PackageEcosystem", "Directory", "EventBusId") + .IsUnique() + .HasFilter("[EventBusId] IS NOT NULL"); + + b.ToTable("UpdateJobs"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.Repository", b => + { + b.OwnsMany("Tingle.Dependabot.Models.DependabotRegistry", "Registries", b1 => + { + b1.Property("RepositoryId") + .HasColumnType("nvarchar(50)"); + + b1.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("int"); + + b1.Property("AuthKey") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "auth-key"); + + b1.Property("Key") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "key"); + + b1.Property("Organization") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "organization"); + + b1.Property("Password") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "password"); + + b1.Property("PublicKeyFingerprint") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "public-key-fingerprint"); + + b1.Property("ReplacesBase") + .HasColumnType("bit") + .HasAnnotation("Relational:JsonPropertyName", "replaces-base"); + + b1.Property("Repo") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "repo"); + + b1.Property("Token") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "token"); + + b1.Property("Type") + .IsRequired() + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "type"); + + b1.Property("Url") + .IsRequired() + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "url"); + + b1.Property("Username") + .HasColumnType("nvarchar(max)") + .HasAnnotation("Relational:JsonPropertyName", "username"); + + b1.HasKey("RepositoryId", "Id"); + + b1.ToTable("Repositories"); + + b1.ToJson("Registries"); + + b1.WithOwner() + .HasForeignKey("RepositoryId"); + }); + + b.Navigation("Registries"); + }); + + modelBuilder.Entity("Tingle.Dependabot.Models.UpdateJob", b => + { + b.OwnsOne("Tingle.Dependabot.Models.UpdateJobResources", "Resources", b1 => + { + b1.Property("UpdateJobId") + .HasColumnType("nvarchar(50)"); + + b1.Property("Cpu") + .HasColumnType("float"); + + b1.Property("Memory") + .HasColumnType("float"); + + b1.HasKey("UpdateJobId"); + + b1.ToTable("UpdateJobs"); + + b1.WithOwner() + .HasForeignKey("UpdateJobId"); + }); + + b.Navigation("Resources") + .IsRequired(); + }); +#pragma warning restore 612, 618 + } + } +} diff --git 
a/server/Tingle.Dependabot/Models/DependabotConfiguration.cs b/server/Tingle.Dependabot/Models/DependabotConfiguration.cs new file mode 100644 index 00000000..fd1d9401 --- /dev/null +++ b/server/Tingle.Dependabot/Models/DependabotConfiguration.cs @@ -0,0 +1,202 @@ +using System.ComponentModel.DataAnnotations; +using System.Runtime.Serialization; +using System.Text.Json.Serialization; +using YamlDotNet.Serialization; + +namespace Tingle.Dependabot.Models; + +public class DependabotConfiguration +{ + [Required, AllowedValues(2)] + [JsonPropertyName("version")] + public int Version { get; set; } + + [Required, MinLength(1)] + [JsonPropertyName("updates")] + public List? Updates { get; set; } + + [JsonPropertyName("registries")] + public Dictionary? Registries { get; set; } +} + +public record DependabotUpdate +{ + /// Ecosystem for the update. + [Required] + [JsonPropertyName("package-ecosystem")] + public DependabotPackageEcosystem? PackageEcosystem { get; set; } + + [Required] + [JsonPropertyName("directory")] + public string? Directory { get; set; } + + [Required] + [JsonPropertyName("schedule")] + public DependabotUpdateSchedule? Schedule { get; set; } + + [Required] + [JsonPropertyName("open-pull-requests-limit")] + public int? OpenPullRequestsLimit { get; set; } = 5; + + [JsonPropertyName("allow")] + public List? Allow { get; set; } + [JsonPropertyName("labels")] + public List? Labels { get; set; } + [JsonPropertyName("milestone")] + public int? Milestone { get; set; } = null; + [JsonPropertyName("pull-request-branch-name")] + public DependabotPullRequestBranchName? PullRequestBranchName { get; set; } + [JsonPropertyName("rebase-strategy")] + public DependabotRebaseStrategy RebaseStrategy { get; set; } = DependabotRebaseStrategy.Auto; + [JsonPropertyName("insecure-external-code-execution")] + public DependabotInsecureExternalCodeExecution? InsecureExternalCodeExecution { get; set; } + [JsonPropertyName("target-branch")] + public string? TargetBranch { get; set; } + [JsonPropertyName("vendor")] + public bool Vendor { get; set; } = false; + [JsonPropertyName("versioning-strategy")] + public DependabotVersioningStrategy VersioningStrategy { get; set; } = DependabotVersioningStrategy.Auto; +} + +public class DependabotUpdateSchedule +{ + [Required] + [JsonPropertyName("interval")] + public DependabotScheduleInterval? Interval { get; set; } + + [Required] + [JsonPropertyName("time")] + public TimeOnly? Time { get; set; } = new TimeOnly(2, 0); + + [Required] + [JsonPropertyName("day")] + public DependabotScheduleDay? Day { get; set; } = DependabotScheduleDay.Monday; + + [Required, TimeZone] + [JsonPropertyName("timezone")] + public string Timezone { get; set; } = "Etc/UTC"; + + /// Generate the appropriate CRON schedule. + public string GenerateCron() + { + // format to use: + // minute, hour, day of month, month, day of week + + var time = Time ?? throw new InvalidOperationException($"'{nameof(Time)}' cannot be null at this point"); + var day = Day ?? 
throw new InvalidOperationException($"'{nameof(Day)}' cannot be null at this point"); + return $"{time:mm} {time:HH} " + Interval switch + { + DependabotScheduleInterval.Daily => "* * 1-5", // any day of the month, any month, but on weekdays + DependabotScheduleInterval.Weekly => $"* * {(int)day}", // any day of the month, any month, but on a given day + DependabotScheduleInterval.Monthly => "1 * *", // first day of the month, any month, any day of the week + _ => throw new NotImplementedException(), + }; + } +} + +public class DependabotAllowDependency +{ + [JsonPropertyName("dependency-name")] + public string? DependencyName { get; set; } + [JsonPropertyName("dependency-type")] + public DependabotDependencyType? DependencyType { get; set; } + + public bool IsValid() => DependencyName is not null || DependencyType is not null; +} + +public class DependabotPullRequestBranchName +{ + [Required] + [AllowedValues("-", "_", "/")] + [JsonPropertyName("separator")] + public string? Separator { get; set; } +} + +public class DependabotRegistry +{ + [Required] + [JsonPropertyName("type")] + public string? Type { get; set; } + + [Required, Url] + [JsonPropertyName("url")] + public string? Url { get; set; } + + [JsonPropertyName("username")] + public string? Username { get; set; } + + [DataType(DataType.Password)] + [JsonPropertyName("password")] + public string? Password { get; set; } + + [JsonPropertyName("key")] + [DataType(DataType.Password)] + public string? Key { get; set; } + + [JsonPropertyName("token")] + [DataType(DataType.Password)] + public string? Token { get; set; } + + [JsonPropertyName("replaces-base")] + public bool? ReplacesBase { get; set; } // keep nullable to prevent issues with database context + + [JsonPropertyName("organization")] + public string? Organization { get; set; } + [JsonPropertyName("repo")] + public string? Repo { get; set; } + [JsonPropertyName("auth-key")] + public string? AuthKey { get; set; } + [JsonPropertyName("public-key-fingerprint")] + public string? 
PublicKeyFingerprint { get; set; } +} + +public enum DependabotPackageEcosystem +{ + Bundler, + Cargo, + Composer, + Docker, + Elixir, + Elm, + + [EnumMember(Value = "gitsubmodule")] + [YamlMember(Alias = "gitsubmodule")] + GitSubmodule, + + [EnumMember(Value = "github-actions")] + [YamlMember(Alias = "github-actions")] + GithubActions, + + [EnumMember(Value = "gomod")] + [YamlMember(Alias = "gomod")] + GoModules, + + Gradle, + Maven, + Mix, + Npm, + NuGet, + Pip, + Terraform, +} + +public enum DependabotScheduleInterval { Daily, Weekly, Monthly, } +public enum DependabotScheduleDay { Sunday, Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, } +public enum DependabotDependencyType { Direct, All, Production, Development, } +public enum DependabotRebaseStrategy { Disabled, Auto, } +public enum DependabotInsecureExternalCodeExecution { Allow, Deny, } + +public enum DependabotVersioningStrategy +{ + Auto, + Widen, + Increase, + + [EnumMember(Value = "lock-file-only")] + [YamlMember(Alias = "lock-file-only")] + LockFileOnly, + + [EnumMember(Value = "increase-if-necessary")] + [YamlMember(Alias = "increase-if-necessary")] + IncreaseIfNecessary, +} diff --git a/server/Tingle.Dependabot/Models/MainDbContext.cs b/server/Tingle.Dependabot/Models/MainDbContext.cs new file mode 100644 index 00000000..1718bc27 --- /dev/null +++ b/server/Tingle.Dependabot/Models/MainDbContext.cs @@ -0,0 +1,40 @@ +using Microsoft.AspNetCore.DataProtection.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore; + +namespace Tingle.Dependabot.Models; + +public class MainDbContext : DbContext, IDataProtectionKeyContext +{ + public MainDbContext(DbContextOptions options) : base(options) { } + + public DbSet Repositories => Set(); + public DbSet UpdateJobs => Set(); + + public DbSet DataProtectionKeys => Set(); + + protected override void OnModelCreating(ModelBuilder modelBuilder) + { + base.OnModelCreating(modelBuilder); + + modelBuilder.Entity(b => + { + b.HasIndex(r => r.Created).IsDescending(); // faster filtering + b.HasIndex(r => r.ProviderId).IsUnique(); + + b.Property(r => r.Updates).HasJsonConversion(); + b.OwnsMany(r => r.Registries).ToJson(); + }); + + modelBuilder.Entity(b => + { + b.HasIndex(j => j.Created).IsDescending(); // faster filtering + + b.HasIndex(j => j.RepositoryId); + b.HasIndex(j => new { j.PackageEcosystem, j.Directory, }); // faster filtering + b.HasIndex(j => new { j.PackageEcosystem, j.Directory, j.EventBusId, }).IsUnique(); + b.HasIndex(j => j.AuthKey).IsUnique(); + + b.OwnsOne(j => j.Resources); + }); + } +} diff --git a/server/Tingle.Dependabot/Models/MergeStrategy.cs b/server/Tingle.Dependabot/Models/MergeStrategy.cs new file mode 100644 index 00000000..0f7f97a0 --- /dev/null +++ b/server/Tingle.Dependabot/Models/MergeStrategy.cs @@ -0,0 +1,9 @@ +namespace Tingle.Dependabot.Models; + +public enum MergeStrategy +{ + NoFastForward = 0, + Rebase = 1, + RebaseMerge = 2, + Squash = 3, +} diff --git a/server/Tingle.Dependabot/Models/Repository.cs b/server/Tingle.Dependabot/Models/Repository.cs new file mode 100644 index 00000000..b4929d7c --- /dev/null +++ b/server/Tingle.Dependabot/Models/Repository.cs @@ -0,0 +1,55 @@ +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; + +namespace Tingle.Dependabot.Models; + +public class Repository +{ + [Key, MaxLength(50)] + public string? Id { get; set; } + + public DateTimeOffset Created { get; set; } + + public DateTimeOffset Updated { get; set; } + + /// Name of the repository as per provider. + public string? 
Name { get; set; } + public string? Slug { get; set; } + + /// Identifier of the repository as per provider. + [JsonIgnore] // only for internal use + public string? ProviderId { get; set; } + + /// + /// Latest commit SHA synchronized for the configuration file. + /// + [MaxLength(200)] + public string? LatestCommit { get; set; } + + /// Contents of the configuration file as of . + [Required] + [JsonIgnore] // only for internal use + public string? ConfigFileContents { get; set; } + + /// + /// Exception that encountered, if any, when parsing the configuration file. + /// This is populated when updates is null or empty. + /// + public string? SyncException { get; set; } + + /// + /// Updates for the repository, extracted from the configuration file. + /// When null or empty, there was a parsing exception. + /// + public IList Updates { get; set; } = new List(); + + /// + /// Registries for the repository, extracted from the configuration file. + /// When null or empty, there was a parsing exception. + /// + [JsonIgnore] // only for internal use + public List Registries { get; set; } = new List(); + + [Timestamp] + public byte[]? Etag { get; set; } +} diff --git a/server/Tingle.Dependabot/Models/RepositoryUpdate.cs b/server/Tingle.Dependabot/Models/RepositoryUpdate.cs new file mode 100644 index 00000000..6beaa09d --- /dev/null +++ b/server/Tingle.Dependabot/Models/RepositoryUpdate.cs @@ -0,0 +1,26 @@ +using System.Text.Json.Serialization; + +namespace Tingle.Dependabot.Models; + +public record RepositoryUpdate : DependabotUpdate +{ + public RepositoryUpdate() { } // required for deserialization + + public RepositoryUpdate(DependabotUpdate update) : base(update) { } + + /// The dependency files. + [JsonPropertyName("files")] + public List Files { get; set; } = new List(); + + /// Identifier of the latest job. + [JsonPropertyName("latest-job-id")] + public string? LatestJobId { get; set; } + + /// Status of the latest job. + [JsonPropertyName("latest-job-status")] + public UpdateJobStatus? LatestJobStatus { get; set; } + + /// Time at which the latest update was run. + [JsonPropertyName("latest-update")] + public DateTimeOffset? LatestUpdate { get; set; } +} diff --git a/server/Tingle.Dependabot/Models/SynchronizationRequest.cs b/server/Tingle.Dependabot/Models/SynchronizationRequest.cs new file mode 100644 index 00000000..e9883b6a --- /dev/null +++ b/server/Tingle.Dependabot/Models/SynchronizationRequest.cs @@ -0,0 +1,12 @@ +namespace Tingle.Dependabot.Models; + +/// +/// Represents a model for processing a synchronization request +/// +public class SynchronizationRequest +{ + /// + /// Indicates whether we should trigger the update jobs where changes have been detected. + /// + public bool Trigger { get; set; } +} diff --git a/server/Tingle.Dependabot/Models/TriggerUpdateRequest.cs b/server/Tingle.Dependabot/Models/TriggerUpdateRequest.cs new file mode 100644 index 00000000..3faa5932 --- /dev/null +++ b/server/Tingle.Dependabot/Models/TriggerUpdateRequest.cs @@ -0,0 +1,15 @@ +using System.ComponentModel.DataAnnotations; + +namespace Tingle.Dependabot.Models; + +/// +/// Represents a model for triggering an update job. +/// +public class TriggerUpdateRequest +{ + /// + /// Index of the repository update. + /// + [Required] + public int? 
Id { get; set; } +} diff --git a/server/Tingle.Dependabot/Models/UpdateJob.cs b/server/Tingle.Dependabot/Models/UpdateJob.cs new file mode 100644 index 00000000..11ad8bf8 --- /dev/null +++ b/server/Tingle.Dependabot/Models/UpdateJob.cs @@ -0,0 +1,76 @@ +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; + +namespace Tingle.Dependabot.Models; + +// This class independent of one-to-many relationships for detached and prolonged tracking. +// The records are cleaned up on a schedule. +public class UpdateJob +{ + [Key, MaxLength(50)] + public string? Id { get; set; } + + public DateTimeOffset Created { get; set; } + + /// Status of the update job. + public UpdateJobStatus Status { get; set; } + + /// Trigger for the update job. + public UpdateJobTrigger Trigger { get; set; } + + /// Identifier of the repository. + [Required] + [JsonIgnore] // only for internal use + public string? RepositoryId { get; set; } + + /// Slug of the repository. + [Required] + [JsonIgnore] // only for internal use + public string? RepositorySlug { get; set; } + + /// Identifier of the event on the EventBus, if any. + [JsonIgnore] // only for internal use + public string? EventBusId { get; set; } + + /// + /// Commit SHA of the configuration file used for the update. + /// + /// 1dabbdfa71465a6eb6c0b44be9f3f6461b4b35e2 + [MaxLength(50)] + public string? Commit { get; set; } + + /// Ecosystem for the update. + [JsonIgnore] // only for internal use + public DependabotPackageEcosystem PackageEcosystem { get; set; } + + /// Identifier of the repository update. + [Required] + public string? Directory { get; set; } + + /// Resources provisioned for the update. + [Required] + public UpdateJobResources? Resources { get; set; } + + /// + /// Authorization key for the job. + /// Used by the updater to make API calls. + /// + [Required] + [JsonIgnore] // only for internal use + public string? AuthKey { get; set; } + + /// When the job started. + public DateTimeOffset? Start { get; set; } + + /// When the job ended. + public DateTimeOffset? End { get; set; } + + /// Duration in milliseconds. + public long? Duration { get; set; } + + /// Detailed log output. + public string? Log { get; set; } + + [Timestamp] + public byte[]? Etag { get; set; } +} diff --git a/server/Tingle.Dependabot/Models/UpdateJobResources.cs b/server/Tingle.Dependabot/Models/UpdateJobResources.cs new file mode 100644 index 00000000..dd8ca2e9 --- /dev/null +++ b/server/Tingle.Dependabot/Models/UpdateJobResources.cs @@ -0,0 +1,48 @@ +using Azure.ResourceManager.ContainerInstance.Models; +using System.ComponentModel.DataAnnotations; + +namespace Tingle.Dependabot.Models; + +public class UpdateJobResources +{ + public UpdateJobResources() { } // required for deserialization + + public UpdateJobResources(double cpu, double memory) + { + // multiplication by 100 to avoid the approximation + if (memory * 100 % (0.1 * 100) != 0) + { + throw new ArgumentException("The memory requirement should be in increments of 0.1.", nameof(memory)); + } + + Cpu = cpu; + Memory = memory; + } + + /// CPU units provisioned. + /// 0.25 + [Required] + public double Cpu { get; set; } + + /// Memory provisioned in GB. 
+ /// 1.2 + [Required] + public double Memory { get; set; } + + public static UpdateJobResources FromEcosystem(DependabotPackageEcosystem ecosystem) + { + return ecosystem switch + { + DependabotPackageEcosystem.NuGet => new(cpu: 0.25, memory: 0.2), + DependabotPackageEcosystem.GitSubmodule => new(cpu: 0.1, memory: 0.2), + DependabotPackageEcosystem.Terraform => new(cpu: 0.25, memory: 1), + DependabotPackageEcosystem.Npm => new(cpu: 0.25, memory: 1), + _ => new UpdateJobResources(cpu: 0.25, memory: 0.5), + }; + } + + public static implicit operator ContainerResourceRequestsContent(UpdateJobResources resources) + { + return new(memoryInGB: resources.Memory, cpu: resources.Cpu); + } +} diff --git a/server/Tingle.Dependabot/Models/UpdateJobResponse.cs b/server/Tingle.Dependabot/Models/UpdateJobResponse.cs new file mode 100644 index 00000000..ebd05dd0 --- /dev/null +++ b/server/Tingle.Dependabot/Models/UpdateJobResponse.cs @@ -0,0 +1,82 @@ +using System.Text.Json.Serialization; + +namespace Tingle.Dependabot.Models; + +public sealed record UpdateJobResponse(UpdateJobData Data); +public sealed record UpdateJobData(UpdateJobAttributes Attributes); + +public sealed record UpdateJobAttributes() +{ + public UpdateJobAttributes(UpdateJob job) : this() + { + } + + [JsonPropertyName("allowed-updates")] + public required IEnumerable AllowedUpdates { get; set; } + + [JsonPropertyName("credentials-metadata")] + public required IEnumerable CredentialsMetadata { get; set; } + + [JsonPropertyName("dependencies")] + public required IEnumerable Dependencies { get; set; } + + [JsonPropertyName("directory")] + public required string Directory { get; set; } + + [JsonPropertyName("existing-pull-requests")] + public required IEnumerable ExistingPullRequests { get; set; } + + [JsonPropertyName("ignore-conditions")] + public required IEnumerable IgnoreConditions { get; set; } + + [JsonPropertyName("security-advisories")] + public required IEnumerable SecurityAdvisories { get; set; } + + [JsonPropertyName("package_manager")] + public required DependabotPackageEcosystem PackageManager { get; set; } + + [JsonPropertyName("repo-name")] + public required string RepoName { get; set; } + + [JsonPropertyName("source")] + public required UpdateJobAttributesSource Source { get; set; } + + [JsonPropertyName("lockfile-only")] + public bool? LockfileOnly { get; set; } + + [JsonPropertyName("requirements-update-strategy")] + public string? RequirementsUpdateStrategy { get; set; } + + [JsonPropertyName("update-subdependencies")] + public bool? UpdateSubdependencies { get; set; } + + [JsonPropertyName("updating-a-pull-request")] + public bool? UpdatingAPullRequest { get; set; } + + [JsonPropertyName("vendor-dependencies")] + public bool? VendorDependencies { get; set; } + + [JsonPropertyName("security-updates-only")] + public bool? SecurityUpdatesOnly { get; set; } +} + +public sealed record UpdateJobAttributesSource() +{ + [JsonPropertyName("provider")] + public required string Provider { get; set; } + + [JsonPropertyName("repo")] + public required string Repo { get; set; } + + [JsonPropertyName("directory")] + public required string Directory { get; set; } + + [JsonPropertyName("branch")] + public string? Branch { get; set; } + + [JsonPropertyName("hostname")] + public string? Hostname { get; set; } + + [JsonPropertyName("api-endpoint")] + public string? 
ApiEndpoint { get; set; } +} diff --git a/server/Tingle.Dependabot/Models/UpdateJobStatus.cs b/server/Tingle.Dependabot/Models/UpdateJobStatus.cs new file mode 100644 index 00000000..17ab6c3c --- /dev/null +++ b/server/Tingle.Dependabot/Models/UpdateJobStatus.cs @@ -0,0 +1,9 @@ +namespace Tingle.Dependabot.Models; + +public enum UpdateJobStatus +{ + Scheduled = 0, + Running = 1, + Succeeded = 2, + Failed = 3, +} diff --git a/server/Tingle.Dependabot/Models/UpdateJobTrigger.cs b/server/Tingle.Dependabot/Models/UpdateJobTrigger.cs new file mode 100644 index 00000000..84f6eaa6 --- /dev/null +++ b/server/Tingle.Dependabot/Models/UpdateJobTrigger.cs @@ -0,0 +1,15 @@ +using System.Runtime.Serialization; + +namespace Tingle.Dependabot.Models; + +public enum UpdateJobTrigger +{ + Scheduled = 0, + + [EnumMember(Value = "missed_schedule")] + MissedSchedule = 1, + + Synchronization = 2, + + Manual = 3, +} diff --git a/server/Tingle.Dependabot/Program.cs b/server/Tingle.Dependabot/Program.cs new file mode 100644 index 00000000..17dded7b --- /dev/null +++ b/server/Tingle.Dependabot/Program.cs @@ -0,0 +1,309 @@ +using AspNetCore.Authentication.ApiKey; +using AspNetCore.Authentication.Basic; +using Microsoft.AspNetCore.DataProtection; +using Microsoft.AspNetCore.Diagnostics.HealthChecks; +using Microsoft.AspNetCore.Mvc; +using Microsoft.EntityFrameworkCore; +using MiniValidation; +using System.ComponentModel.DataAnnotations; +using System.Text.Json; +using Tingle.Dependabot; +using Tingle.Dependabot.Consumers; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.Dependabot.Workflow; +using Tingle.EventBus; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddApplicationInsightsTelemetry(builder.Configuration); + +// Add DbContext +builder.Services.AddDbContext(options => +{ + options.UseSqlServer(builder.Configuration.GetConnectionString("Sql"), options => options.EnableRetryOnFailure()); + options.EnableDetailedErrors(); +}); +// restore this once the we no longer pull schedules from DB on startup +//builder.Services.AddDatabaseMigrator(); + +builder.Services.AddDatabaseDeveloperPageExceptionFilter(); + +// Add data protection +builder.Services.AddDataProtection().PersistKeysToDbContext(); + + +// Configure any generated URL to be in lower case +builder.Services.Configure(options => options.LowercaseUrls = true); + +builder.Services.AddAuthentication() + .AddJwtBearer(AuthConstants.SchemeNameManagement) + .AddApiKeyInAuthorizationHeader(AuthConstants.SchemeNameUpdater, options => options.Realm = "Dependabot") + .AddBasic(AuthConstants.SchemeNameServiceHooks, options => options.Realm = "Dependabot"); + +builder.Services.AddAuthorization(options => +{ + options.AddPolicy(AuthConstants.PolicyNameManagement, policy => + { + policy.AddAuthenticationSchemes(AuthConstants.SchemeNameManagement) + .RequireAuthenticatedUser(); + }); + + options.AddPolicy(AuthConstants.PolicyNameServiceHooks, policy => + { + policy.AddAuthenticationSchemes(AuthConstants.SchemeNameServiceHooks) + .RequireAuthenticatedUser(); + }); + + options.AddPolicy(AuthConstants.PolicyNameUpdater, policy => + { + policy.AddAuthenticationSchemes(AuthConstants.SchemeNameUpdater) + .RequireAuthenticatedUser(); + }); +}); + +builder.Services.AddMemoryCache(); +builder.Services.AddDistributedMemoryCache(); + +// Configure other services +builder.Services.ConfigureHttpJsonOptions(options => +{ + options.SerializerOptions.AllowTrailingCommas = true; + options.SerializerOptions.ReadCommentHandling = 
JsonCommentHandling.Skip; + + options.SerializerOptions.Converters.Add( + new Tingle.Extensions.Json.JsonStringEnumMemberConverter( + namingPolicy: options.SerializerOptions.PropertyNamingPolicy, + allowIntegerValues: true)); +}); +builder.Services.AddNotificationsHandler(); +builder.Services.AddWorkflowServices(builder.Configuration.GetSection("Workflow")); + +// Add event bus +var selectedTransport = builder.Configuration.GetValue("EventBus:SelectedTransport"); +builder.Services.AddEventBus(builder => +{ + // Setup consumers + builder.AddConsumer(); + builder.AddConsumer(); + builder.AddConsumer(); + builder.AddConsumer(); + + // Setup transports + var credential = new Azure.Identity.DefaultAzureCredential(); + if (selectedTransport is EventBusTransportKind.ServiceBus) + { + builder.AddAzureServiceBusTransport( + options => ((AzureServiceBusTransportCredentials)options.Credentials).TokenCredential = credential); + } + else if (selectedTransport is EventBusTransportKind.InMemory) + { + builder.AddInMemoryTransport(); + } +}); + +// Add health checks +builder.Services.AddHealthChecks() + .AddDbContextCheck(); + +var app = builder.Build(); + +app.UseRouting(); + +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapHealthChecks("/health"); +app.MapHealthChecks("/liveness", new HealthCheckOptions { Predicate = _ => false, }); +app.MapWebhooks(); +app.MapManagementApi(); +app.MapUpdateJobsApi(); + +// setup the application environment +await AppSetup.SetupAsync(app); + +await app.RunAsync(); + +internal enum EventBusTransportKind { InMemory, ServiceBus, } + +internal static class AuthConstants +{ + // These values are fixed strings due to configuration sections + internal const string SchemeNameManagement = "Management"; + internal const string SchemeNameServiceHooks = "ServiceHooks"; + internal const string SchemeNameUpdater = "Updater"; + + internal const string PolicyNameManagement = "Management"; + internal const string PolicyNameServiceHooks = "ServiceHooks"; + internal const string PolicyNameUpdater = "Updater"; +} + +internal static class ApplicationExtensions +{ + public static IServiceCollection AddNotificationsHandler(this IServiceCollection services) + { + services.AddScoped(); + return services; + } + + public static IServiceCollection AddWorkflowServices(this IServiceCollection services, IConfiguration configuration) + { + services.Configure(configuration); + services.ConfigureOptions(); + + services.AddSingleton(); + services.AddSingleton(); + + services.AddScoped(); + services.AddScoped(); + services.AddHostedService(); + + return services; + } + + public static IEndpointConventionBuilder MapWebhooks(this IEndpointRouteBuilder builder) + { + var endpoint = builder.MapPost("/webhooks/azure", async (AzureDevOpsEventHandler handler, [FromBody] AzureDevOpsEvent model) => + { + if (!MiniValidator.TryValidate(model, out var errors)) return Results.ValidationProblem(errors); + + await handler.HandleAsync(model); + return Results.Ok(); + }); + + endpoint.RequireAuthorization(AuthConstants.PolicyNameServiceHooks); + + return endpoint; + } + + public static IEndpointRouteBuilder MapManagementApi(this IEndpointRouteBuilder builder) + { + var group = builder.MapGroup(""); + group.RequireAuthorization(AuthConstants.PolicyNameManagement); + + group.MapPost("/sync", async (IEventPublisher publisher, [FromBody] SynchronizationRequest model) => + { + // request synchronization of the project + var evt = new ProcessSynchronization(model.Trigger); + await publisher.PublishAsync(evt); + + 
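+        // The synchronization itself happens out of band: the event is picked up from the event bus
+        // (presumably by the ProcessSynchronization consumer registered earlier in this file), so the API responds immediately.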
return Results.Ok(); + }); + + group.MapPost("/webhooks/register/azure", async (AzureDevOpsProvider adoProvider) => + { + await adoProvider.CreateOrUpdateSubscriptionsAsync(); + return Results.Ok(); + }); + + group.MapGet("repos", async (MainDbContext dbContext) => Results.Ok(await dbContext.Repositories.ToListAsync())); + group.MapGet("repos/{id}", async (MainDbContext dbContext, [FromRoute, Required] string id) => Results.Ok(await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == id))); + group.MapPost("repos/{id}/sync", async (IEventPublisher publisher, MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] SynchronizationRequest model) => + { + if (!MiniValidator.TryValidate(model, out var errors)) return Results.ValidationProblem(errors); + + // ensure repository exists + var repository = await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == id); + if (repository is null) + { + return Results.Problem(title: "repository_not_found", statusCode: 400); + } + + // request synchronization of the repository + var evt = new ProcessSynchronization(model.Trigger, repositoryId: repository.Id, null); + await publisher.PublishAsync(evt); + + return Results.Ok(repository); + }); + group.MapGet("repos/{id}/jobs/{jobId}", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromRoute, Required] string jobId) => + { + // ensure repository exists + var repository = await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == id); + if (repository is null) + { + return Results.Problem(title: "repository_not_found", statusCode: 400); + } + + // find the job + var job = dbContext.UpdateJobs.Where(j => j.RepositoryId == repository.Id && j.Id == jobId).SingleOrDefaultAsync(); + return Results.Ok(job); + }); + group.MapPost("repos/{id}/trigger", async (IEventPublisher publisher, MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] TriggerUpdateRequest model) => + { + if (!MiniValidator.TryValidate(model, out var errors)) return Results.ValidationProblem(errors); + + // ensure repository exists + var repository = await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == id); + if (repository is null) + { + return Results.Problem(title: "repository_not_found", statusCode: 400); + } + + // ensure the repository update exists + var update = repository.Updates.ElementAtOrDefault(model.Id!.Value); + if (update is null) + { + return Results.Problem(title: "repository_update_not_found", statusCode: 400); + } + + // trigger update for specific update + var evt = new TriggerUpdateJobsEvent + { + RepositoryId = repository.Id, + RepositoryUpdateId = model.Id.Value, + Trigger = UpdateJobTrigger.Manual, + }; + await publisher.PublishAsync(evt); + + return Results.Ok(repository); + }); + + return builder; + } + + public static IEndpointRouteBuilder MapUpdateJobsApi(this IEndpointRouteBuilder builder) + { + var group = builder.MapGroup("update_jobs"); + group.RequireAuthorization(AuthConstants.PolicyNameUpdater); + + // TODO: create endpoints accessed by the updater during execution similar to the one hosted by GitHub + + //group.MapGet("/{id}", async (MainDbContext dbContext, [FromRoute, Required] string id) => + //{ + // var job = await dbContext.UpdateJobs.SingleAsync(p => p.Id == id); + + // var attr = new UpdateJobAttributes(job) + // { + // AllowedUpdates = Array.Empty(), + // CredentialsMetadata = Array.Empty(), + // Dependencies = Array.Empty(), + // Directory = job.Directory!, + // ExistingPullRequests = Array.Empty(), + // IgnoreConditions = 
Array.Empty(), + // PackageManager = job.PackageEcosystem, + // RepoName = job.RepositorySlug!, + // SecurityAdvisories = Array.Empty(), + // Source = new UpdateJobAttributesSource + // { + // Directory = job.Directory!, + // Provider = "azure", + // Repo = job.RepositorySlug!, + // Branch = job.Branch, + // Hostname = , + // ApiEndpoint =, + // }, + // }; + // return Results.Ok(new UpdateJobResponse(new(attr))); + //}); + + //group.MapPost("/{id}/create_pull_request", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] CreatePullRequestModel model) => { }); + //group.MapPost("/{id}/update_pull_request", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] UpdatePullRequestModel model) => { }); + //group.MapPost("/{id}/close_pull_request", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] ClosePullRequestModel model) => { }); + //group.MapPost("/{id}/record_update_job_error", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] RecordUpdateJobErrorModel model) => { }); + //group.MapPatch("/{id}/mark_as_processed", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] MarkAsProcessedModel model) => { }); + //group.MapPost("/{id}/update_dependency_list", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] UpdateDependencyListModel model) => { }); + //group.MapPost("/{id}/record_package_manager_version", async (MainDbContext dbContext, [FromRoute, Required] string id, [FromBody] RecordPackageManagerVersionModel model) => { }); + + return builder; + } +} diff --git a/server/Tingle.Dependabot/Properties/launchSettings.json b/server/Tingle.Dependabot/Properties/launchSettings.json new file mode 100644 index 00000000..4617c8a6 --- /dev/null +++ b/server/Tingle.Dependabot/Properties/launchSettings.json @@ -0,0 +1,22 @@ +{ + "$schema": "http://json.schemastore.org/launchsettings.json", + "profiles": { + "Tingle.Dependabot": { + "commandName": "Project", + "launchBrowser": true, + "launchUrl": "health", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development", + "EFCORE_PERFORM_MIGRATIONS": "true" + }, + "applicationUrl": "https://localhost:44390;http://localhost:59269" + }, + "Docker": { + "commandName": "Docker", + "launchBrowser": true, + "launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/health", + "publishAllPorts": true, + "useSSL": true + } + } +} \ No newline at end of file diff --git a/server/Tingle.Dependabot/Tingle.Dependabot.csproj b/server/Tingle.Dependabot/Tingle.Dependabot.csproj new file mode 100644 index 00000000..ea151f82 --- /dev/null +++ b/server/Tingle.Dependabot/Tingle.Dependabot.csproj @@ -0,0 +1,47 @@ + + + + 11.0 + enable + enable + net7.0 + true + true + $(NoWarn);1591;CA1819;CA1031 + $(GITVERSION_NUGETVERSION) + e58d698d-4791-43fc-8b76-ce1f01cbd092 + Linux + ..\.. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/server/Tingle.Dependabot/Workflow/AzureDevOpsProjectUrl.cs b/server/Tingle.Dependabot/Workflow/AzureDevOpsProjectUrl.cs new file mode 100644 index 00000000..9f555150 --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/AzureDevOpsProjectUrl.cs @@ -0,0 +1,105 @@ +using System.ComponentModel; +using System.Globalization; + +namespace Tingle.Dependabot.Workflow; + +/// Easier manager and parser for URLs of projects on Azure DevOps. 
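+/// For example (illustrative values, not taken from this repository): "https://dev.azure.com/fabrikam/DefaultProject"
+/// and "https://fabrikam.visualstudio.com/DefaultProject" would both parse to organization "fabrikam" and
+/// project "DefaultProject"; the constructor below holds the exact parsing rules for each hostname form.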
+[TypeConverter(typeof(AzureDevOpsProjectUrlTypeConverter))] +public readonly struct AzureDevOpsProjectUrl : IEquatable +{ + private readonly Uri uri; // helps with case slash matching as compared to a plain string + + public AzureDevOpsProjectUrl(string value) : this(new Uri(value)) { } + + public AzureDevOpsProjectUrl(Uri uri) + { + this.uri = uri ?? throw new ArgumentNullException(nameof(uri)); + var host = Hostname = uri.Host; + + var builder = new UriBuilder(uri) { UserName = null, Password = null }; + if (string.Equals(host, "dev.azure.com", StringComparison.OrdinalIgnoreCase)) + { + OrganizationName = uri.AbsolutePath.Split("/")[1]; + builder.Path = OrganizationName + "/"; + ProjectIdOrName = uri.AbsolutePath.Replace("_apis/projects/", "").Split("/")[2]; + } + else if (host.EndsWith("visualstudio.com", StringComparison.OrdinalIgnoreCase)) + { + OrganizationName = host.Split(".")[0]; + builder.Path = string.Empty; + ProjectIdOrName = uri.AbsolutePath.Replace("_apis/projects/", "").Split("/")[1]; + } + else throw new ArgumentException($"Error parsing: '{uri}' into components"); + + OrganizationUrl = builder.Uri.ToString(); + UsesProjectId = Guid.TryParse(ProjectIdOrName, out _); // Azure uses GUID for identifiers + } + + public static AzureDevOpsProjectUrl Create(string hostname, string organizationName, string projectIdOrName) + { + var builder = new UriBuilder(Uri.UriSchemeHttps, hostname); + if (string.Equals(hostname, "dev.azure.com", StringComparison.OrdinalIgnoreCase)) + { + builder.Path = organizationName + "/" + projectIdOrName; + } + else if (hostname.EndsWith("visualstudio.com", StringComparison.OrdinalIgnoreCase)) + { + builder.Path = "/" + projectIdOrName; + } + else throw new ArgumentException($"The hostname '{hostname}' cannot be used for creation."); + + return new(builder.Uri); + } + + public string Hostname { get; } + public string OrganizationName { get; } + public string OrganizationUrl { get; } + public string ProjectIdOrName { get; } + public bool UsesProjectId { get; } + + public string? ProjectId => UsesProjectId ? ProjectIdOrName : null; + public string? ProjectName => UsesProjectId ? null : ProjectIdOrName; + + public string MakeRepositorySlug(string name) => $"{OrganizationName}/{ProjectName}/_git/{name}"; + + public override string ToString() => uri.ToString(); + public override int GetHashCode() => uri.GetHashCode(); + public override bool Equals(object? obj) => obj is AzureDevOpsProjectUrl url && Equals(url); + public bool Equals(AzureDevOpsProjectUrl other) => uri == other.uri; + + public static bool operator ==(AzureDevOpsProjectUrl left, AzureDevOpsProjectUrl right) => left.Equals(right); + public static bool operator !=(AzureDevOpsProjectUrl left, AzureDevOpsProjectUrl right) => !(left == right); + + public static implicit operator AzureDevOpsProjectUrl(string value) => new(value); + public static implicit operator AzureDevOpsProjectUrl(Uri value) => new(value); + public static implicit operator string(AzureDevOpsProjectUrl url) => url.ToString(); + public static implicit operator Uri(AzureDevOpsProjectUrl url) => url.uri; + + private class AzureDevOpsProjectUrlTypeConverter : TypeConverter + { + /// + public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => sourceType == typeof(string) || sourceType == typeof(Uri); + + /// + public override bool CanConvertTo(ITypeDescriptorContext? context, Type? destinationType) => destinationType == typeof(string) || destinationType == typeof(Uri); + + /// + public override object? 
ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) + { + if (value is Uri u) return new AzureDevOpsProjectUrl(u); + else if (value is string s) return new AzureDevOpsProjectUrl(s); + return base.ConvertFrom(context, culture, value); + } + + /// + public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType) + { + if (value is AzureDevOpsProjectUrl u) + { + if (destinationType == typeof(Uri)) return u.uri; + else if (destinationType == typeof(string)) return u.ToString(); + } + return base.ConvertTo(context, culture, value, destinationType); + } + } +} diff --git a/server/Tingle.Dependabot/Workflow/AzureDevOpsProvider.cs b/server/Tingle.Dependabot/Workflow/AzureDevOpsProvider.cs new file mode 100644 index 00000000..d9367fac --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/AzureDevOpsProvider.cs @@ -0,0 +1,227 @@ +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Options; +using Microsoft.TeamFoundation.Core.WebApi; +using Microsoft.TeamFoundation.SourceControl.WebApi; +using Microsoft.VisualStudio.Services.Common; +using Microsoft.VisualStudio.Services.FormInput; +using Microsoft.VisualStudio.Services.ServiceHooks.WebApi; +using Microsoft.VisualStudio.Services.WebApi; +using System.Security.Cryptography; +using System.Text; + +namespace Tingle.Dependabot.Workflow; + +internal class AzureDevOpsProvider +{ + private static readonly (string, string)[] SubscriptionEventTypes = + { + ("git.push", "1.0"), + ("git.pullrequest.updated", "1.0"), + ("git.pullrequest.merged", "1.0"), + ("ms.vss-code.git-pullrequest-comment-event", "2.0"), + }; + + private readonly IMemoryCache cache; + private readonly WorkflowOptions options; + + public AzureDevOpsProvider(IMemoryCache cache, IOptions optionsAccessor) + { + this.cache = cache ?? throw new ArgumentNullException(nameof(cache)); + options = optionsAccessor?.Value ?? throw new ArgumentNullException(nameof(optionsAccessor)); + } + + public async Task> CreateOrUpdateSubscriptionsAsync(CancellationToken cancellationToken = default) + { + // get a connection to Azure DevOps + var url = options.ProjectUrl!.Value; + var connection = CreateVssConnection(url, options.ProjectToken!); + + // get the projectId + var projectId = (await (await connection.GetClientAsync(cancellationToken)).GetProject(url.ProjectIdOrName)).Id.ToString(); + + // fetch the subscriptions + var client = await connection.GetClientAsync(cancellationToken); + var subscriptions = (await client.QuerySubscriptionsAsync(new SubscriptionsQuery + { + PublisherId = "tfs", + PublisherInputFilters = new List + { + new InputFilter + { + Conditions = new List + { + new InputFilterCondition + { + InputId = "projectId", + Operator = InputFilterOperator.Equals, + InputValue = projectId, + }, + }, + }, + }, + + ConsumerId = "webHooks", + ConsumerActionId = "httpRequest", + })).Results; + + var webhookUrl = options.WebhookEndpoint; + var ids = new List(); + foreach (var (eventType, resourceVersion) in SubscriptionEventTypes) + { + // find an existing one + Subscription? 
existing = null; + foreach (var sub in subscriptions) + { + if (sub.EventType == eventType + && sub.ConsumerInputs.TryGetValue("url", out var rawUrl) + && webhookUrl == new Uri(rawUrl)) // comparing with Uri ensure we don't have to deal with slashes and default ports + { + existing = sub; + break; + } + } + + // if we have an existing one, update it, otherwise create a new one + + if (existing is not null) + { + // publisherId, consumerId, and consumerActionId cannot be updated + existing.EventType = eventType; + existing.ResourceVersion = resourceVersion; + existing.PublisherInputs = MakeTfsPublisherInputs(eventType, projectId); + existing.ConsumerInputs = MakeWebHooksConsumerInputs(); + existing = await client.UpdateSubscriptionAsync(existing); + } + else + { + existing = new Subscription + { + EventType = eventType, + ResourceVersion = resourceVersion, + + PublisherId = "tfs", + PublisherInputs = MakeTfsPublisherInputs(eventType, projectId), + ConsumerId = "webHooks", + ConsumerActionId = "httpRequest", + ConsumerInputs = MakeWebHooksConsumerInputs(), + }; + existing = await client.CreateSubscriptionAsync(existing); + } + + // track the identifier of the subscription + ids.Add(existing.Id.ToString()); + } + + return ids; + } + + public async Task> GetRepositoriesAsync(CancellationToken cancellationToken) + { + // get a connection to Azure DevOps + var url = options.ProjectUrl!.Value; + var connection = CreateVssConnection(url, options.ProjectToken!); + + // fetch the repositories + var client = await connection.GetClientAsync(cancellationToken); + var repos = await client.GetRepositoriesAsync(project: url.ProjectIdOrName, cancellationToken: cancellationToken); + return repos.OrderBy(r => r.Name).ToList(); + } + + public async Task GetRepositoryAsync(string repositoryIdOrName, CancellationToken cancellationToken) + { + // get a connection to Azure DevOps + var url = options.ProjectUrl!.Value; + var connection = CreateVssConnection(url, options.ProjectToken!); + + // get the repository + var client = await connection.GetClientAsync(cancellationToken); + return await client.GetRepositoryAsync(project: url.ProjectIdOrName, repositoryId: repositoryIdOrName, cancellationToken: cancellationToken); + } + + public async Task GetConfigurationFileAsync(string repositoryIdOrName, CancellationToken cancellationToken = default) + { + // get a connection to Azure DevOps + var url = options.ProjectUrl!.Value; + var connection = CreateVssConnection(url, options.ProjectToken!); + + // Try all known paths + var paths = options.ConfigurationFilePaths; + var client = await connection.GetClientAsync(cancellationToken); + foreach (var path in paths) + { + try + { + var item = await client.GetItemAsync(project: url.ProjectIdOrName, + repositoryId: repositoryIdOrName, + path: path, + latestProcessedChange: true, + includeContent: true, + cancellationToken: cancellationToken); + + if (item is not null) return item; + } + catch (VssServiceException) { } + } + + return null; + } + + private static Dictionary MakeTfsPublisherInputs(string type, string projectId) + { + // possible inputs are available via an authenticated request to + // https://dev.azure.com/{organization}/_apis/hooks/publishers/tfs + + // always include the project identifier, to restrict events from that project + var result = new Dictionary { ["projectId"] = projectId, }; + + if (type is "git.pullrequest.updated") + { + result["notificationType"] = "StatusUpdateNotification"; + } + + if (type is "git.pullrequest.merged") + { + 
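+            // Assumption (based on how Azure DevOps service hook filters generally behave): restricting
+            // "mergeResult" to "Conflicts" means this subscription only fires when a merge attempt ends in conflicts.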
result["mergeResult"] = "Conflicts"; + } + + return result; + } + + private Dictionary MakeWebHooksConsumerInputs() + { + return new Dictionary + { + // possible inputs are available via an authenticated request to + // https://dev.azure.com/{organization}/_apis/hooks/consumers/webHooks + + ["detailedMessagesToSend"] = "none", + ["messagesToSend"] = "none", + ["url"] = options.WebhookEndpoint!.ToString(), + ["basicAuthUsername"] = "vsts", + ["basicAuthPassword"] = options.SubscriptionPassword!, + }; + } + + private VssConnection CreateVssConnection(AzureDevOpsProjectUrl url, string token) + { + static string hash(string v) + { + var bytes = Encoding.UTF8.GetBytes(v); + var hash = SHA256.HashData(bytes); + return BitConverter.ToString(hash).Replace("-", ""); + } + + // The cache key uses the project URL in case the token is different per project. + // It also, uses the token to ensure a new connection if the token is updated. + // The token is hashed to avoid exposing it just in case it is exposed. + var cacheKey = $"vss_connections:{hash($"{url}{token}")}"; + var cached = cache.Get(cacheKey); + if (cached is not null) return cached; + + var uri = new Uri(url.OrganizationUrl); + var creds = new VssBasicCredential(string.Empty, token); + cached = new VssConnection(uri, creds); + + return cache.Set(cacheKey, cached, TimeSpan.FromHours(1)); + } +} diff --git a/server/Tingle.Dependabot/Workflow/SchedulableUpdate.cs b/server/Tingle.Dependabot/Workflow/SchedulableUpdate.cs new file mode 100644 index 00000000..a826521e --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/SchedulableUpdate.cs @@ -0,0 +1,12 @@ +using Tingle.Dependabot.Models; + +namespace Tingle.Dependabot.Workflow; + +public readonly record struct SchedulableUpdate(int Index, DependabotUpdateSchedule Supplied) +{ + public void Deconstruct(out int index, out DependabotUpdateSchedule supplied) + { + index = Index; + supplied = Supplied; + } +} diff --git a/server/Tingle.Dependabot/Workflow/Synchronizer.cs b/server/Tingle.Dependabot/Workflow/Synchronizer.cs new file mode 100644 index 00000000..76555435 --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/Synchronizer.cs @@ -0,0 +1,230 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Options; +using System.ComponentModel.DataAnnotations; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.EventBus; +using Tingle.Extensions; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace Tingle.Dependabot.Workflow; + +internal class Synchronizer +{ + private readonly MainDbContext dbContext; + private readonly AzureDevOpsProvider adoProvider; + private readonly IEventPublisher publisher; + private readonly WorkflowOptions options; + private readonly ILogger logger; + + private readonly IDeserializer yamlDeserializer; + + public Synchronizer(MainDbContext dbContext, + AzureDevOpsProvider adoProvider, + IEventPublisher publisher, + IOptions optionsAccessor, + ILogger logger) + { + this.dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext)); + this.adoProvider = adoProvider ?? throw new ArgumentNullException(nameof(adoProvider)); + this.publisher = publisher ?? throw new ArgumentNullException(nameof(publisher)); + options = optionsAccessor?.Value ?? throw new ArgumentNullException(nameof(optionsAccessor)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + + yamlDeserializer = new DeserializerBuilder().WithNamingConvention(HyphenatedNamingConvention.Instance) + .IgnoreUnmatchedProperties() + .Build(); + } + + public async Task SynchronizeAsync(bool trigger, CancellationToken cancellationToken = default) + { + // track the synchronization pairs + var syncPairs = new List<(SynchronizerConfigurationItem, Repository?)>(); + + // get the repositories from Azure + logger.LogDebug("Listing repositories ..."); + var adoRepos = await adoProvider.GetRepositoriesAsync(cancellationToken); + logger.LogDebug("Found {RepositoriesCount} repositories", adoRepos.Count); + var adoReposMap = adoRepos.ToDictionary(r => r.Id.ToString(), r => r); + + // synchronize each project + foreach (var (adoRepositoryId, adoRepo) in adoReposMap) + { + // get the repository from the database + var adoRepositoryName = adoRepo.Name; + var repository = await (from r in dbContext.Repositories + where r.ProviderId == adoRepositoryId + select r).SingleOrDefaultAsync(cancellationToken); + + var item = await adoProvider.GetConfigurationFileAsync(repositoryIdOrName: adoRepositoryId, + cancellationToken: cancellationToken); + + // Track for further synchronization + var sci = new SynchronizerConfigurationItem(options.ProjectUrl!.Value.MakeRepositorySlug(adoRepo.Name), adoRepo, item); + syncPairs.Add((sci, repository)); + } + + // remove repositories that are no longer tracked (i.e. the repository was removed) + var providerIdsToKeep = syncPairs.Where(p => p.Item1.HasConfiguration).Select(p => p.Item1.Id).ToList(); + var deleted = await dbContext.Repositories.Where(r => !providerIdsToKeep.Contains(r.ProviderId!)).ExecuteDeleteAsync(cancellationToken); + if (deleted > 0) + { + logger.LogInformation("Deleted {Count} repositories that are no longer needed", deleted); + } + + // synchronize each repository + foreach (var (pi, repository) in syncPairs) + { + await SynchronizeAsync(repository, pi, trigger, cancellationToken); + } + } + + public async Task SynchronizeAsync(Repository repository, bool trigger, CancellationToken cancellationToken = default) + { + // get repository + var adoRepo = await adoProvider.GetRepositoryAsync(repositoryIdOrName: repository.ProviderId!, + cancellationToken: cancellationToken); + + // get the configuration file + var item = await adoProvider.GetConfigurationFileAsync(repositoryIdOrName: repository.ProviderId!, + cancellationToken: cancellationToken); + + // perform synchronization + var sci = new SynchronizerConfigurationItem(options.ProjectUrl!.Value.MakeRepositorySlug(adoRepo.Name), adoRepo, item); + await SynchronizeAsync(repository, sci, trigger, cancellationToken); + } + + public async Task SynchronizeAsync(string? 
repositoryProviderId, bool trigger, CancellationToken cancellationToken = default) + { + var repository = await (from r in dbContext.Repositories + where r.ProviderId == repositoryProviderId + select r).SingleOrDefaultAsync(cancellationToken); + + // get repository + var adoRepo = await adoProvider.GetRepositoryAsync(repositoryIdOrName: repositoryProviderId!, + cancellationToken: cancellationToken); + + // get the configuration file + var item = await adoProvider.GetConfigurationFileAsync(repositoryIdOrName: repositoryProviderId!, + cancellationToken: cancellationToken); + + // perform synchronization + var sci = new SynchronizerConfigurationItem(options.ProjectUrl!.Value.MakeRepositorySlug(adoRepo.Name), adoRepo, item); + await SynchronizeAsync(repository, sci, trigger, cancellationToken); + } + + internal async Task SynchronizeAsync(Repository? repository, + SynchronizerConfigurationItem providerInfo, + bool trigger, + CancellationToken cancellationToken = default) + { + // ensure not null (can be null when deleted and an event is sent) + if (!providerInfo.HasConfiguration) + { + // delete repository + if (repository is not null) + { + dbContext.Repositories.Remove(repository); + await dbContext.SaveChangesAsync(cancellationToken); + + // publish RepositoryDeletedEvent event + var evt = new RepositoryDeletedEvent { RepositoryId = repository.Id, }; + await publisher.PublishAsync(evt, cancellationToken: cancellationToken); + } + + return; + } + + // check if the file changed (different commit) + bool commitChanged = true; // assume changes unless otherwise + var commitId = providerInfo.CommitId; + if (repository is not null && (commitChanged = !string.Equals(commitId, repository.LatestCommit))) + { + logger.LogDebug("Configuration file for '{Slug}' is new or has been updated.", repository.Slug); + } + + // create repository + RepositoryCreatedEvent? rce = null; + if (repository is null) + { + repository = new Repository + { + Id = Ksuid.Generate(), + Created = DateTimeOffset.UtcNow, + ProviderId = providerInfo.Id, + }; + await dbContext.Repositories.AddAsync(repository, cancellationToken); + rce = new RepositoryCreatedEvent { RepositoryId = repository.Id, }; + } + + // if the name of the repository has changed then we assume the commit changed so that we update stuff + if (repository.Name != providerInfo.Name) commitChanged = true; + + if (commitChanged) + { + // set/update existing values + repository.Updated = DateTimeOffset.UtcNow; + repository.Name = providerInfo.Name; + repository.Slug = providerInfo.Slug; + repository.LatestCommit = commitId; + repository.ConfigFileContents = providerInfo.Content; + + try + { + var configuration = yamlDeserializer.Deserialize(repository.ConfigFileContents); + RecursiveValidator.ValidateObjectRecursive(configuration); + + // set the registries + repository.Registries = configuration.Registries?.Values.ToList() ?? new List(); + + // set the updates a fresh + var updates = configuration.Updates!; + repository.Updates = updates.Select(update => new RepositoryUpdate(update) + { + Files = new List(), // files are populated by an API call from Ruby during job execution + + LatestJobId = null, + LatestJobStatus = null, + LatestUpdate = null, + }).ToList(); + } + catch (YamlDotNet.Core.YamlException ye) + { + logger.LogWarning(ye, "Skipping '{Slug}'. 
The YAML file is invalid.", repository.Slug); + repository.SyncException = ye.Message; + } + catch (ValidationException ve) + { + logger.LogWarning(ve, "Configuration file for '{Slug}' is invalid.", repository.Slug); + repository.SyncException = ve.Message; + } + + // Update the database + await dbContext.SaveChangesAsync(cancellationToken); + + // publish RepositoryCreatedEvent or RepositoryUpdatedEvent event + if (rce is not null) + { + await publisher.PublishAsync(rce, cancellationToken: cancellationToken); + } + else + { + var evt = new RepositoryUpdatedEvent { RepositoryId = repository.Id, }; + await publisher.PublishAsync(evt, cancellationToken: cancellationToken); + } + + if (trigger) + { + // trigger update jobs for the whole repository + var evt = new TriggerUpdateJobsEvent + { + RepositoryId = repository.Id, + RepositoryUpdateId = null, // run all + Trigger = UpdateJobTrigger.Synchronization, + }; + await publisher.PublishAsync(evt, cancellationToken: cancellationToken); + } + } + } +} diff --git a/server/Tingle.Dependabot/Workflow/SynchronizerConfigurationItem.cs b/server/Tingle.Dependabot/Workflow/SynchronizerConfigurationItem.cs new file mode 100644 index 00000000..c2fa856a --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/SynchronizerConfigurationItem.cs @@ -0,0 +1,20 @@ +using System.Diagnostics.CodeAnalysis; + +namespace Tingle.Dependabot.Workflow; + +public readonly record struct SynchronizerConfigurationItem(string Id, string Name, string Slug, string? CommitId, string? Content) +{ + public SynchronizerConfigurationItem(string slug, + Microsoft.TeamFoundation.SourceControl.WebApi.GitRepository repo, + Microsoft.TeamFoundation.SourceControl.WebApi.GitItem? item) + : this(Id: repo.Id.ToString(), + Name: repo.Name, + Slug: slug, + CommitId: item?.LatestProcessedChange.CommitId, + Content: item?.Content) + { } + + [MemberNotNullWhen(true, nameof(CommitId))] + [MemberNotNullWhen(true, nameof(Content))] + public bool HasConfiguration => !string.IsNullOrEmpty(CommitId) && !string.IsNullOrEmpty(Content); +} diff --git a/server/Tingle.Dependabot/Workflow/UpdateJobHostType.cs b/server/Tingle.Dependabot/Workflow/UpdateJobHostType.cs new file mode 100644 index 00000000..627ed096 --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/UpdateJobHostType.cs @@ -0,0 +1,7 @@ +namespace Tingle.Dependabot.Workflow; + +public enum UpdateJobHostType +{ + ContainerInstances, + ContainerApps, +} diff --git a/server/Tingle.Dependabot/Workflow/UpdateRunner.cs b/server/Tingle.Dependabot/Workflow/UpdateRunner.cs new file mode 100644 index 00000000..5eb52faf --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/UpdateRunner.cs @@ -0,0 +1,319 @@ +using Azure.Identity; +using Azure.Monitor.Query; +using Azure.ResourceManager; +using Azure.ResourceManager.ContainerInstance; +using Azure.ResourceManager.ContainerInstance.Models; +using Azure.ResourceManager.Resources; +using Microsoft.Extensions.Options; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Text.RegularExpressions; +using Tingle.Dependabot.Models; + +namespace Tingle.Dependabot.Workflow; + +internal partial class UpdateRunner +{ + [GeneratedRegex("\\${{\\s*([a-zA-Z_]+[a-zA-Z0-9_-]*)\\s*}}", RegexOptions.Compiled)] + private static partial Regex PlaceholderPattern(); + + [GeneratedRegex("^((?:[a-zA-Z0-9-_]+)\\.azurecr\\.io)\\/")] + private static partial Regex ContainerRegistryPattern(); + + private const string UpdaterContainerName = "updater"; + + private static readonly JsonSerializerOptions 
serializerOptions = new(JsonSerializerDefaults.Web); + + private readonly WorkflowOptions options; + private readonly ILogger logger; + + private readonly ArmClient armClient; + private readonly ResourceGroupResource resourceGroup; + private readonly LogsQueryClient logsQueryClient; + + public UpdateRunner(IOptions optionsAccessor, ILogger logger) + { + options = optionsAccessor?.Value ?? throw new ArgumentNullException(nameof(optionsAccessor)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + armClient = new ArmClient(new DefaultAzureCredential()); + resourceGroup = armClient.GetResourceGroupResource(new(options.ResourceGroupId!)); + logsQueryClient = new LogsQueryClient(new DefaultAzureCredential()); + } + + public async Task CreateAsync(Repository repository, RepositoryUpdate update, UpdateJob job, CancellationToken cancellationToken = default) + { + var resourceName = MakeResourcename(job); + var type = options.JobHostType; + + if (type is UpdateJobHostType.ContainerInstances) + { + // if we have an existing one, there is nothing more to do + var containerGroups = resourceGroup.GetContainerGroups(); + try + { + var response = await containerGroups.GetAsync(resourceName, cancellationToken); + if (response.Value is not null) return; + } + catch (Azure.RequestFailedException rfe) when (rfe.Status is 404) { } + + // prepare the container + var container = new ContainerInstanceContainer(UpdaterContainerName, options.UpdaterContainerImage, new(job.Resources!)); + var env = CreateVariables(repository, update, job); + foreach (var (key, value) in env) container.EnvironmentVariables.Add(new ContainerEnvironmentVariable(key) { Value = value, }); + + // prepare the container group + var data = new ContainerGroupData(options.Location!, new[] { container, }, ContainerInstanceOperatingSystemType.Linux) + { + RestartPolicy = ContainerGroupRestartPolicy.Never, // should run to completion without restarts + DiagnosticsLogAnalytics = new ContainerGroupLogAnalytics(options.LogAnalyticsWorkspaceId, options.LogAnalyticsWorkspaceKey), + Identity = new Azure.ResourceManager.Models.ManagedServiceIdentity(Azure.ResourceManager.Models.ManagedServiceIdentityType.UserAssigned) + { + UserAssignedIdentities = { [new(options.ManagedIdentityId!)] = new() { /*ttk bug*/} }, + }, + }; + + // add credentials for pulling image(s) from azure container registry + if (TryGetAzureContainerRegistry(options.UpdaterContainerImage!, out var registry)) + { + data.ImageRegistryCredentials.Add(new ContainerGroupImageRegistryCredential(registry) { Identity = options.ManagedIdentityId, }); + } + + // add tags to the data for tracing purposes + data.Tags["purpose"] = "dependabot"; + data.Tags.AddIfNotDefault("ecosystem", job.PackageEcosystem.GetEnumMemberAttrValueOrDefault()) + .AddIfNotDefault("repository", repository.Slug) + .AddIfNotDefault("directory", update.Directory) + .AddIfNotDefault("machine-name", Environment.MachineName); + + // create the container group (do not wait completion because it might take too long, do not use the result) + _ = await containerGroups.CreateOrUpdateAsync(Azure.WaitUntil.Started, resourceName, data, cancellationToken); + logger.LogInformation("Created ContainerGroup for {UpdateJobId}", job.Id); + job.Status = UpdateJobStatus.Running; + } + else + { + throw new NotSupportedException($"Hosting jobs on '{type}' is not yet supported."); + } + } + + public async Task DeleteAsync(UpdateJob job, CancellationToken cancellationToken = default) + { + var resourceName = 
MakeResourcename(job); + + try + { + // if it does not exist, there is nothing more to do + var containerGroups = resourceGroup.GetContainerGroups(); + var response = await containerGroups.GetAsync(resourceName, cancellationToken); + if (response.Value is null) return; + + // delete the container group + await response.Value.DeleteAsync(Azure.WaitUntil.Completed, cancellationToken); + } + catch (Azure.RequestFailedException rfe) when (rfe.Status is 404) { } + } + + public async Task GetStateAsync(UpdateJob job, CancellationToken cancellationToken = default) + { + var resourceName = MakeResourcename(job); + + try + { + // if it does not exist, there is nothing more to do + var response = await resourceGroup.GetContainerGroups().GetAsync(resourceName, cancellationToken); + var resource = response.Value; + + var status = resource.Data.InstanceView.State switch + { + "Succeeded" => UpdateJobStatus.Succeeded, + "Failed" => UpdateJobStatus.Failed, + _ => UpdateJobStatus.Running, + }; + + // there is no state for jobs that are running + if (status is UpdateJobStatus.Running) return null; + + // get the period + var currentState = resource.Data.Containers.Single(c => c.Name == UpdaterContainerName).InstanceView?.CurrentState; + DateTimeOffset? start = currentState?.StartOn, end = currentState?.FinishOn; + + // create and return state + return new UpdateRunnerState(status, start, end); + } + catch (Azure.RequestFailedException rfe) when (rfe.Status is 404) { } + + return null; + } + + public async Task GetLogsAsync(UpdateJob job, CancellationToken cancellationToken = default) + { + var logs = (string?)null; + var resourceName = MakeResourcename(job); + + // pull logs from ContainerInstances + if (string.IsNullOrWhiteSpace(logs)) + { + var query = $"ContainerInstanceLog_CL | where ContainerGroup_s == '{resourceName}' | order by TimeGenerated asc | project Message"; + var response = await logsQueryClient.QueryWorkspaceAsync(workspaceId: options.LogAnalyticsWorkspaceId, + query: query, + timeRange: QueryTimeRange.All, + cancellationToken: cancellationToken); + + logs = string.Join(Environment.NewLine, response.Value); + } + + // pull logs from ContainerApps + if (string.IsNullOrWhiteSpace(logs)) + { + var query = $"ContainerAppConsoleLogs_CL | where ContainerAppName_s == '{resourceName}' | order by TimeGenerated asc | project Log_s"; + var response = await logsQueryClient.QueryWorkspaceAsync(workspaceId: options.LogAnalyticsWorkspaceId, + query: query, + timeRange: QueryTimeRange.All, + cancellationToken: cancellationToken); + + logs = string.Join(Environment.NewLine, response.Value); + } + + return logs; + } + + internal static string MakeResourcename(UpdateJob job) => $"dependabot-job-{job.Id}"; + internal static bool TryGetAzureContainerRegistry(string input, [NotNullWhen(true)] out string? registry) + { + registry = null; + var match = ContainerRegistryPattern().Match(input); + if (match.Success) + { + registry = match.Groups[1].Value; + return true; + } + + return false; + } + + internal IDictionary CreateVariables(Repository repository, RepositoryUpdate update, UpdateJob job) + { + static string? ToJson(T? entries) => entries is null ? 
null : JsonSerializer.Serialize(entries, serializerOptions); // null ensures we do not add to the values + + // Add compulsory values + var values = new Dictionary + { + ["DEPENDABOT_PACKAGE_MANAGER"] = job.PackageEcosystem.GetEnumMemberAttrValueOrDefault(), + ["DEPENDABOT_DIRECTORY"] = update.Directory!, + ["DEPENDABOT_OPEN_PULL_REQUESTS_LIMIT"] = update.OpenPullRequestsLimit!.Value.ToString(), + }; + + // Add optional values + values.AddIfNotDefault("GITHUB_ACCESS_TOKEN", options.GithubToken) + .AddIfNotDefault("DEPENDABOT_REBASE_STRATEGY", update.RebaseStrategy.GetEnumMemberAttrValueOrDefault()) + .AddIfNotDefault("DEPENDABOT_TARGET_BRANCH", update.TargetBranch) + .AddIfNotDefault("DEPENDABOT_VENDOR", update.Vendor ? "true" : null) + .AddIfNotDefault("DEPENDABOT_REJECT_EXTERNAL_CODE", (update.InsecureExternalCodeExecution == DependabotInsecureExternalCodeExecution.Deny).ToString().ToLowerInvariant()) + .AddIfNotDefault("DEPENDABOT_VERSIONING_STRATEGY", update.VersioningStrategy.GetEnumMemberAttrValueOrDefault()) + .AddIfNotDefault("DEPENDABOT_ALLOW_CONDITIONS", ToJson(MakeAllowEntries(update.Allow))) + .AddIfNotDefault("DEPENDABOT_LABELS", ToJson(update.Labels)) + .AddIfNotDefault("DEPENDABOT_BRANCH_NAME_SEPARATOR", update.PullRequestBranchName?.Separator) + .AddIfNotDefault("DEPENDABOT_MILESTONE", update.Milestone?.ToString()) + .AddIfNotDefault("DEPENDABOT_FAIL_ON_EXCEPTION", "true"); + + var secrets = new Dictionary(options.Secrets) { ["DEFAULT_TOKEN"] = options.ProjectToken!, }; + + // Add values for Azure DevOps + var url = options.ProjectUrl!.Value; + values.AddIfNotDefault("AZURE_HOSTNAME", url.Hostname) + .AddIfNotDefault("AZURE_ORGANIZATION", url.OrganizationName) + .AddIfNotDefault("AZURE_PROJECT", url.ProjectName) + .AddIfNotDefault("AZURE_REPOSITORY", Uri.EscapeDataString(repository.Name!)) + .AddIfNotDefault("AZURE_ACCESS_TOKEN", options.ProjectToken) + .AddIfNotDefault("AZURE_SET_AUTO_COMPLETE", (options.AutoComplete ?? false).ToString().ToLowerInvariant()) + .AddIfNotDefault("AZURE_AUTO_COMPLETE_IGNORE_CONFIG_IDS", ToJson(options.AutoCompleteIgnoreConfigs?.Split(';'))) + .AddIfNotDefault("AZURE_MERGE_STRATEGY", options.AutoCompleteMergeStrategy?.GetEnumMemberAttrValueOrDefault()) + .AddIfNotDefault("AZURE_AUTO_APPROVE_PR", (options.AutoApprove ?? false).ToString().ToLowerInvariant()); + + // Add extra credentials with replaced secrets + values.AddIfNotDefault("DEPENDABOT_EXTRA_CREDENTIALS", ToJson(MakeExtraCredentials(repository.Registries, secrets))); + + return values; + } + internal static IList>? MakeExtraCredentials(ICollection? registries, IDictionary secrets) + { + return registries?.Select(v => + { + var type = v.Type?.Replace("-", "_") ?? throw new InvalidOperationException("Type should not be null"); + + var values = new Dictionary().AddIfNotDefault("type", type); + + // values for hex-organization + values.AddIfNotDefault("organization", v.Organization); + + // values for hex-repository + values.AddIfNotDefault("repo", v.Repo); + values.AddIfNotDefault("auth-key", v.AuthKey); + values.AddIfNotDefault("public-key-fingerprint", v.PublicKeyFingerprint); + + values.AddIfNotDefault("username", v.Username); + values.AddIfNotDefault("password", ConvertPlaceholder(v.Password, secrets)); + values.AddIfNotDefault("key", ConvertPlaceholder(v.Key, secrets)); + values.AddIfNotDefault("token", ConvertPlaceholder(v.Token, secrets)); + values.AddIfNotDefault("replaces-base", v.ReplacesBase is true ? 
"true" : null); + + // Some credentials do not use the 'url' property in the Ruby updater. + // npm_registry and docker_registry use 'registry' which should be stripped off the scheme. + // terraform_registry uses 'host' which is the hostname from the given URL. + + if (type == "docker_registry" || type == "npm_registry") + { + values.Add("registry", v.Url!.Replace("https://", "").Replace("http://", "")); + } + else if (type == "terraform_registry") + { + values.Add("host", new Uri(v.Url!).Host); + } + else + { + values.AddIfNotDefault("url", v.Url!); + } + var useRegistryProperty = type.Contains("npm") || type.Contains("docker"); + + return values; + }).ToList(); + } + internal static string? ConvertPlaceholder(string? input, IDictionary secrets) + { + if (string.IsNullOrWhiteSpace(input)) return input; + + var result = input; + var matches = PlaceholderPattern().Matches(input); + foreach (var m in matches) + { + if (m is not Match match || !match.Success) continue; + + var placeholder = match.Value; + var name = match.Groups[1].Value; + if (secrets.TryGetValue(name, out var replacement)) + { + result = result.Replace(placeholder, replacement); + } + } + + return result; + } + internal static IList>? MakeAllowEntries(List? entries) + { + return entries?.Where(e => e.IsValid()) + .Select(e => new Dictionary() + .AddIfNotDefault("dependency-name", e.DependencyName) + .AddIfNotDefault("dependency-type", e.DependencyType?.GetEnumMemberAttrValueOrDefault())) + .ToList(); + } +} + +public readonly record struct UpdateRunnerState(UpdateJobStatus Status, DateTimeOffset? Start, DateTimeOffset? End) +{ + public void Deconstruct(out UpdateJobStatus status, out DateTimeOffset? start, out DateTimeOffset? end) + { + status = Status; + start = Start; + end = End; + } +} diff --git a/server/Tingle.Dependabot/Workflow/UpdateScheduler.cs b/server/Tingle.Dependabot/Workflow/UpdateScheduler.cs new file mode 100644 index 00000000..c57bba4e --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/UpdateScheduler.cs @@ -0,0 +1,91 @@ +using System.Collections.Concurrent; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.EventBus; +using Tingle.PeriodicTasks; + +namespace Tingle.Dependabot.Workflow; + +internal class UpdateScheduler +{ + private readonly IEventPublisher publisher; + private readonly ILogger logger; + + private ConcurrentDictionary> store = new(); + + public UpdateScheduler(IEventPublisher publisher, IHostApplicationLifetime lifetime, ILogger logger) + { + this.publisher = publisher ?? throw new ArgumentNullException(nameof(publisher)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + + lifetime.ApplicationStopping.Register(() => + { + // stop all timers in 1 second, max + var cached = Interlocked.Exchange(ref store, null!); + var timers = cached.Values.ToArray().SelectMany(l => l).ToArray(); + var cts = new CancellationTokenSource(TimeSpan.FromSeconds(1)); + Task.WaitAll(timers.Select(t => t.StopAsync(cts.Token)).ToArray()); + }); + } + + public async Task CreateOrUpdateAsync(Repository repository, CancellationToken cancellationToken = default) + { + logger.LogDebug("Creating/Updating schedules for repository '{RepositoryId}'.", repository.Id); + var updates = new List(); + foreach (var update in repository.Updates) + { + updates.Add(new(repository.Updates.IndexOf(update), update.Schedule!)); + } + + var repositoryId = repository.Id!; + var timers = new List(); + foreach (var (index, supplied) in updates) + { + var schedule = supplied.GenerateCron(); + var payload = new TimerPayload(repositoryId, index); + var timer = new CronScheduleTimer(schedule, supplied.Timezone, CustomTimerCallback, payload); + timers.Add(timer); + } + + // remove existing then add the new ones + await RemoveAsync(repositoryId, cancellationToken); + store[repositoryId] = timers; + + // start all the timers + await Task.WhenAll(timers.Select(t => t.StartAsync(cancellationToken))); + } + + public async Task RemoveAsync(string repositoryId, CancellationToken cancellationToken = default) + { + // remove existing ones + if (store.TryGetValue(repositoryId, out var timers)) + { + // stop all the timers + await Task.WhenAll(timers.Select(t => t.StopAsync(cancellationToken))); + + // dispose all the timers + foreach (var timer in timers) timer.Dispose(); + } + } + + private async Task CustomTimerCallback(CronScheduleTimer timer, object? arg2, CancellationToken cancellationToken) + { + if (arg2 is not TimerPayload payload) + { + logger.LogError("Timer call back does not have correct argument"); + return; + } + + // publish event for the job to be run + var evt = new TriggerUpdateJobsEvent + { + RepositoryId = payload.RepositoryId, + RepositoryUpdateId = payload.RepositoryUpdateId, + Trigger = UpdateJobTrigger.Scheduled, + }; + + await publisher.PublishAsync(evt, cancellationToken: cancellationToken); + } + + private readonly record struct TimerPayload(string RepositoryId, int RepositoryUpdateId); +} diff --git a/server/Tingle.Dependabot/Workflow/WorkflowBackgroundService.cs b/server/Tingle.Dependabot/Workflow/WorkflowBackgroundService.cs new file mode 100644 index 00000000..2795e8db --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/WorkflowBackgroundService.cs @@ -0,0 +1,160 @@ +using Microsoft.EntityFrameworkCore; +using Tingle.Dependabot.Events; +using Tingle.Dependabot.Models; +using Tingle.EventBus; +using Tingle.PeriodicTasks; + +namespace Tingle.Dependabot.Workflow; + +internal class WorkflowBackgroundService : BackgroundService +{ + private readonly IServiceProvider serviceProvider; + private readonly ILogger logger; + + public WorkflowBackgroundService(IServiceProvider serviceProvider, ILogger logger) + { + this.serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var t_synch = SynchronizationAsync(stoppingToken); + var t_missed = CheckMissedTriggerAsync(stoppingToken); + var t_cleanup = CleanupAsync(stoppingToken); + + await Task.WhenAll(t_synch, t_missed, t_cleanup); + } + + internal virtual async Task SynchronizationAsync(CancellationToken cancellationToken = default) + { + var timer = new PeriodicTimer(TimeSpan.FromHours(6)); + + while (!cancellationToken.IsCancellationRequested) + { + await timer.WaitForNextTickAsync(cancellationToken); + await SynchronizationInnerAsync(cancellationToken); + } + } + + internal virtual async Task SynchronizationInnerAsync(CancellationToken cancellationToken = default) + { + // request synchronization of the whole project via events + using var scope = serviceProvider.CreateScope(); + var provider = scope.ServiceProvider; + var publisher = provider.GetRequiredService(); + var evt = new ProcessSynchronization(false); /* database sync should not trigger, just in case it's too many */ + await publisher.PublishAsync(evt, cancellationToken: cancellationToken); + } + + internal virtual Task CheckMissedTriggerAsync(CancellationToken cancellationToken = default) => CheckMissedTriggerAsync(DateTimeOffset.UtcNow, cancellationToken); + internal virtual async Task CheckMissedTriggerAsync(DateTimeOffset referencePoint, CancellationToken cancellationToken = default) + { + var timer = new PeriodicTimer(TimeSpan.FromHours(1)); + + while (!cancellationToken.IsCancellationRequested) + { + await timer.WaitForNextTickAsync(cancellationToken); + await CheckMissedTriggerInnerAsync(referencePoint, cancellationToken); + } + } + internal virtual async Task CheckMissedTriggerInnerAsync(DateTimeOffset referencePoint, CancellationToken cancellationToken = default) + { + using var scope = serviceProvider.CreateScope(); + var provider = scope.ServiceProvider; + var dbContext = provider.GetRequiredService(); + var publisher = provider.GetRequiredService(); + var repositories = await dbContext.Repositories.ToListAsync(cancellationToken); + + foreach (var repository in repositories) + { + foreach (var update in repository.Updates) + { + var schedule = (CronSchedule)update.Schedule!.GenerateCron(); + var timezone = TimeZoneInfo.FindSystemTimeZoneById(update.Schedule.Timezone); + + // check if we missed an execution + var latestUpdate = update.LatestUpdate; + var missed = latestUpdate is null; // when null, it was missed + if (latestUpdate != null) + { + var nextFromLast = schedule.GetNextOccurrence(latestUpdate.Value, timezone); + if (nextFromLast is null) continue; + + var nextFromReference = schedule.GetNextOccurrence(referencePoint, timezone); + if (nextFromReference is null) continue; + + missed = nextFromLast.Value <= referencePoint; // when next is in the past, it was missed + + // for daily schedules, only check if the next is more than 12 hours away + if (missed && update.Schedule.Interval is DependabotScheduleInterval.Daily) + { + missed = (nextFromReference.Value - referencePoint).Hours > 12; + } + } + + // if we missed an execution, trigger one + if (missed) + { + logger.LogWarning("Schedule was missed for {RepositoryId}({UpdateId}). 
Triggering now", repository.Id, repository.Updates.IndexOf(update)); + + // publish event for the job to be run + var evt = new TriggerUpdateJobsEvent + { + RepositoryId = repository.Id, + RepositoryUpdateId = repository.Updates.IndexOf(update), + Trigger = UpdateJobTrigger.MissedSchedule, + }; + + await publisher.PublishAsync(evt, cancellationToken: cancellationToken); + } + } + } + } + + internal virtual async Task CleanupAsync(CancellationToken cancellationToken = default) + { + var timer = new PeriodicTimer(TimeSpan.FromMinutes(15)); // change to once per hour once we move to jobs in Azure ContainerApps + + while (!cancellationToken.IsCancellationRequested) + { + await timer.WaitForNextTickAsync(cancellationToken); + await CleanupInnerAsync(cancellationToken); + } + } + internal virtual async Task CleanupInnerAsync(CancellationToken cancellationToken = default) + { + using var scope = serviceProvider.CreateScope(); + var provider = scope.ServiceProvider; + var dbContext = provider.GetRequiredService(); + var publisher = provider.GetRequiredService(); + + // resolve pending jobs + + // Change this to 3 hours once we have figured out how to get events from Azure + var oldest = DateTimeOffset.UtcNow.AddMinutes(-10); // older than 10 minutes + var jobs = await (from j in dbContext.UpdateJobs + where j.Created <= oldest + where j.Status == UpdateJobStatus.Scheduled || j.Status == UpdateJobStatus.Running + orderby j.Created ascending + select j).Take(100).ToListAsync(cancellationToken); + + if (jobs.Count > 0) + { + logger.LogInformation("Found {Count} jobs that are still pending for more than 10 min. Requesting manual resolution ...", jobs.Count); + + var events = jobs.Select(j => new UpdateJobCheckStateEvent { JobId = j.Id, }).ToList(); + await publisher.PublishAsync(events, cancellationToken: cancellationToken); + } + + // delete old jobs + var cutoff = DateTimeOffset.UtcNow.AddDays(-90); + jobs = await dbContext.UpdateJobs.Where(j => j.Created <= cutoff).Take(100).ToListAsync(cancellationToken); + if (jobs.Count > 0) + { + dbContext.UpdateJobs.RemoveRange(jobs); + await dbContext.SaveChangesAsync(cancellationToken); + logger.LogInformation("Removed {Count} jobs that older than {Cutoff}", jobs.Count, cutoff); + } + } +} diff --git a/server/Tingle.Dependabot/Workflow/WorkflowConfigureOptions.cs b/server/Tingle.Dependabot/Workflow/WorkflowConfigureOptions.cs new file mode 100644 index 00000000..8d81f394 --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/WorkflowConfigureOptions.cs @@ -0,0 +1,78 @@ +using Microsoft.Extensions.Options; + +namespace Tingle.Dependabot.Workflow; + +internal class WorkflowConfigureOptions : IPostConfigureOptions, IValidateOptions +{ + private readonly IConfiguration configuration; + + public WorkflowConfigureOptions(IConfiguration configuration) + { + this.configuration = configuration ?? throw new ArgumentNullException(nameof(configuration)); + } + + public void PostConfigure(string? name, WorkflowOptions options) + { + options.SubscriptionPassword ??= configuration.GetValue("Authentication:Schemes:ServiceHooks:Credentials:vsts"); + } + + public ValidateOptionsResult Validate(string? 
name, WorkflowOptions options) + { + if (options.WebhookEndpoint is null) + { + return ValidateOptionsResult.Fail($"'{nameof(options.WebhookEndpoint)}' is required"); + } + + if (options.ProjectUrl is null) + { + return ValidateOptionsResult.Fail($"'{nameof(options.ProjectUrl)}' is required"); + } + + if (string.IsNullOrWhiteSpace(options.ProjectToken)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.ProjectToken)}' cannot be null or whitespace"); + } + + if (string.IsNullOrWhiteSpace(options.SubscriptionPassword)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.SubscriptionPassword)}' cannot be null or whitespace"); + } + + if (string.IsNullOrWhiteSpace(options.ResourceGroupId)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.ResourceGroupId)}' cannot be null or whitespace"); + } + + if (string.IsNullOrWhiteSpace(options.LogAnalyticsWorkspaceId)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.LogAnalyticsWorkspaceId)}' cannot be null or whitespace"); + } + + if (string.IsNullOrWhiteSpace(options.LogAnalyticsWorkspaceKey)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.LogAnalyticsWorkspaceKey)}' cannot be null or whitespace"); + } + + if (string.IsNullOrWhiteSpace(options.UpdaterContainerImage)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.UpdaterContainerImage)}' cannot be null or whitespace"); + } + + if (string.IsNullOrWhiteSpace(options.ManagedIdentityId)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.ManagedIdentityId)}' cannot be null or whitespace"); + } + + if (options.JobHostType is not UpdateJobHostType.ContainerInstances) + { + return ValidateOptionsResult.Fail($"'{nameof(options.JobHostType)}' only supports container instances"); + } + + if (string.IsNullOrWhiteSpace(options.Location)) + { + return ValidateOptionsResult.Fail($"'{nameof(options.Location)}' cannot be null or whitespace"); + } + + return ValidateOptionsResult.Success; + } +} diff --git a/server/Tingle.Dependabot/Workflow/WorkflowOptions.cs b/server/Tingle.Dependabot/Workflow/WorkflowOptions.cs new file mode 100644 index 00000000..abbec5f5 --- /dev/null +++ b/server/Tingle.Dependabot/Workflow/WorkflowOptions.cs @@ -0,0 +1,96 @@ +using Tingle.Dependabot.Models; + +namespace Tingle.Dependabot.Workflow; + +public class WorkflowOptions +{ + /// Whether to synchronize repositories on startup. + public bool SynchronizeOnStartup { get; set; } = true; + + /// Whether to load schedules on startup. + public bool LoadSchedulesOnStartup { get; set; } = true; + + /// Whether to create/update notifications on startup. + public bool CreateOrUpdateWebhooksOnStartup { get; set; } = true; + + /// URL where subscription notifications shall be sent. + public Uri? WebhookEndpoint { get; set; } + + /// Password used for creation of subscription and authenticating incoming notifications. + public string? SubscriptionPassword { get; set; } + + /// Resource identifier for the resource group to create jobs in. + /// /subscriptions/00000000-0000-1111-0001-000000000000/resourceGroups/DEPENDABOT + public string? ResourceGroupId { get; set; } + + /// CustomerId of the LogAnalytics workspace. + /// 00000000-0000-1111-0001-000000000000 + public string? LogAnalyticsWorkspaceId { get; set; } + + /// AuthenticationKey of the LogAnalytics workspace. + /// AAAAAAAAAAA= + public string? LogAnalyticsWorkspaceKey { get; set; } + + /// Resource identifier for the managed identity used to pull container images. 
+ /// /subscriptions/00000000-0000-1111-0001-000000000000/resourceGroups/DEPENDABOT/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dependabot + public string? ManagedIdentityId { get; set; } + + /// + /// The registry, repository and version of the docker container image to use. + /// Keeping this value fixed in code is important so that the code that depends on it always works. + /// More like a dependency. + ///
+ /// However, in production there may be an issue that requires a rollback, hence the value is placed in options. + ///
+ /// ghcr.io/tinglesoftware/dependabot-updater:0.14 + public string? UpdaterContainerImage { get; set; } + + /// URL for the project. + public AzureDevOpsProjectUrl? ProjectUrl { get; set; } + + /// Authentication token for accessing the project. + public string? ProjectToken { get; set; } + + /// Whether to set automatic completion of pull requests. + public bool? AutoComplete { get; set; } + + public string? AutoCompleteIgnoreConfigs { get; set; } + + public MergeStrategy? AutoCompleteMergeStrategy { get; set; } + + /// Whether to automatically approve pull requests. + public bool? AutoApprove { get; set; } + + /// + /// Token for accessing GitHub APIs. + /// If no value is provided, calls to GitHub are not authenticated. + /// Providing a value avoids being rate limited in case when there + /// are many upgrades at the same time from the same IP. + /// When provided, it must have read access to public repositories. + /// + /// ghp_1234567890 + public string? GithubToken { get; set; } + + /// + /// Secrets that can be replaced in the registries section of the configuration file. + /// + public Dictionary Secrets { get; set; } = new(StringComparer.OrdinalIgnoreCase); + + /// Where to host new update jobs. + public UpdateJobHostType JobHostType { get; set; } = UpdateJobHostType.ContainerInstances; + + /// Location/region where to create new update jobs. + public string? Location { get; set; } // using Azure.Core.Location does not work when binding from IConfiguration + + /// + /// Possible/allowed paths for the configuration files in a repository. + /// + public IReadOnlyList ConfigurationFilePaths { get; set; } = new[] { + // TODO: restore checks in .azuredevops folder once either the code can check that folder or we are passing ignore conditions via update_jobs API + //".azuredevops/dependabot.yml", + //".azuredevops/dependabot.yaml", + + ".github/dependabot.yml", + ".github/dependabot.yaml", + }; +} diff --git a/server/Tingle.Dependabot/appsettings.Development.json b/server/Tingle.Dependabot/appsettings.Development.json new file mode 100644 index 00000000..d8957013 --- /dev/null +++ b/server/Tingle.Dependabot/appsettings.Development.json @@ -0,0 +1,12 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Debug", + "Microsoft": "Information", + "System": "Information", + "System.Net.Http.HttpClient": "Information", + "Tingle.AspNetCore": "Information", + "Tingle.EventBus": "Information" + } + } +} diff --git a/server/Tingle.Dependabot/appsettings.json b/server/Tingle.Dependabot/appsettings.json new file mode 100644 index 00000000..8ffe8722 --- /dev/null +++ b/server/Tingle.Dependabot/appsettings.json @@ -0,0 +1,71 @@ +{ + "Logging": { + "ApplicationInsights": { + "LogLevel": { + "Default": "Warning", + "Microsoft": "Warning" //"Error" + } + }, + "LogLevel": { + "Default": "Information", + "Microsoft": "Warning", + "Microsoft.EntityFrameworkCore": "Warning", + "Microsoft.Hosting.Lifetime": "Information", + "System.Net.Http.HttpClient": "Warning", + "Tingle.AspNetCore": "Warning", + "Tingle.EventBus": "Warning" + } + }, + "AllowedHosts": "*", + + "ConnectionStrings:Sql": "Server=(localdb)\\mssqllocaldb;Database=dependabot;Trusted_Connection=True;MultipleActiveResultSets=true", + + "Authentication": { + "Schemes": { + "Management": { + "Authority": "https://login.microsoftonline.com/dependabot.com/v2.0", + "ValidAudiences": [ + "http://localhost:3000" + ] + }, + "ServiceHooks": { + "Credentials": { + "vsts": "AAAAAAAAAAA=" + } + } + } + }, + + "EventBus": { + "SelectedTransport": "InMemory", 
// InMemory|ServiceBus + + "DefaultTransportWaitStarted": false, // defaults to true which causes startup tasks to hang + "Naming": { + "UseFullTypeNames": false + }, + "Transports": { + "azure-service-bus": { + "FullyQualifiedNamespace": "{your_namespace}.servicebus.windows.net", + "DefaultEntityKind": "Queue" + } + } + }, + + "Workflow": { + "SynchronizeOnStartup": false, + "LoadSchedulesOnStartup": false, + "CreateOrUpdateWebhooksOnStartup": false, + "WebhookEndpoint": "http://localhost:3000/", + "SubscriptionPassword": "", + "ResourceGroupId": "/subscriptions/00000000-0000-1111-0001-000000000000/resourceGroups/DEPENDABOT", + "LogAnalyticsWorkspaceId": "00000000-0000-1111-0001-000000000000", + "LogAnalyticsWorkspaceKey": "AAAAAAAAAAA=", + "ManagedIdentityId": "/subscriptions/00000000-0000-1111-0001-000000000000/resourceGroups/DEPENDABOT/providers/Microsoft.ManagedIdentity/userAssignedIdentities/dependabot", + "UpdaterContainerImage": "ghcr.io/tinglesoftware/dependabot-updater:0.14.2-ci.37", + "ProjectUrl": "https://dev.azure.com/fabrikam/DefaultCollection", + "ProjectToken": "", + "GithubToken": "", + "JobHostType": "ContainerInstances", + "Location": "westeurope" + } +} diff --git a/.editorconfig b/updater/.editorconfig similarity index 100% rename from .editorconfig rename to updater/.editorconfig
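
For illustration (not part of the changeset above): a minimal sketch of the `${{ NAME }}` placeholder substitution that UpdateRunner.ConvertPlaceholder performs with PlaceholderPattern() when building DEPENDABOT_EXTRA_CREDENTIALS from the registries section. The helper name ReplacePlaceholders and the MY_FEED_KEY secret are hypothetical; the real implementation looks names up in the case-insensitive WorkflowOptions.Secrets dictionary, as this sketch assumes.

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

// Illustrative helper; the name and the secret key used below are made up.
static string ReplacePlaceholders(string input, IReadOnlyDictionary<string, string> secrets)
{
    // Same token shape targeted by PlaceholderPattern(): ${{ NAME }}
    var pattern = new Regex(@"\$\{\{\s*([a-zA-Z_]+[a-zA-Z0-9_-]*)\s*\}\}");

    // Known placeholders are replaced with their secret value; unknown names are left untouched,
    // mirroring ConvertPlaceholder, which only rewrites names found in the secrets dictionary.
    return pattern.Replace(input, m =>
        secrets.TryGetValue(m.Groups[1].Value, out var value) ? value : m.Value);
}

// Secrets would normally come from WorkflowOptions.Secrets (case-insensitive keys).
var secrets = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
    ["MY_FEED_KEY"] = "example-token", // hypothetical name and value
};

// e.g. the password field of a registries entry in dependabot.yml
Console.WriteLine(ReplacePlaceholders("${{ MY_FEED_KEY }}", secrets)); // prints: example-token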