Initial commit: SWP.Core enterprise framework with multi-tenant architecture, configuration management, security, telemetry and comprehensive test suite

Janus C. H. Knudsen 2025-08-02 22:16:39 +02:00
commit 5275a75502
87 changed files with 6140 additions and 0 deletions


@ -0,0 +1,11 @@
{
"permissions": {
"allow": [
"Bash(rm:*)",
"Bash(\"/mnt/c/Program Files/dotnet/dotnet.exe\" build --configuration Release)",
"Bash(\"/mnt/c/Program Files/dotnet/dotnet.exe\" test --configuration Release --logger \"console;verbosity=normal\")",
"Bash(find:*)"
],
"deny": []
}
}

202
.editorconfig Normal file

@ -0,0 +1,202 @@
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
# All files
[*]
charset = utf-8
end_of_line = crlf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
indent_size = 4
# Code files
[*.{cs,csx,vb,vbx}]
indent_size = 4
# XML project files
[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}]
indent_size = 2
# XML config files
[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}]
indent_size = 2
# JSON files
[*.{json,json5,webmanifest}]
indent_size = 2
# YAML files
[*.{yml,yaml}]
indent_size = 2
# Markdown files
[*.{md,mdx}]
trim_trailing_whitespace = false
# Web files
[*.{htm,html,js,jsm,ts,tsx,css,sass,scss,less,svg,vue}]
indent_size = 2
# Batch files
[*.{cmd,bat}]
end_of_line = crlf
# Bash files
[*.sh]
end_of_line = lf
# C# files
[*.cs]
# Organize usings
dotnet_sort_system_directives_first = true
dotnet_separate_import_directive_groups = false
# this. preferences
dotnet_style_qualification_for_field = false:suggestion
dotnet_style_qualification_for_property = false:suggestion
dotnet_style_qualification_for_method = false:suggestion
dotnet_style_qualification_for_event = false:suggestion
# Language keywords vs BCL types preferences
dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
dotnet_style_predefined_type_for_member_access = true:suggestion
# Parentheses preferences
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:silent
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:silent
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:silent
dotnet_style_parentheses_in_other_operators = never_if_unnecessary:silent
# Modifier preferences
dotnet_style_require_accessibility_modifiers = for_non_interface_members:suggestion
dotnet_style_readonly_field = true:suggestion
# Expression-level preferences
dotnet_style_object_initializer = true:suggestion
dotnet_style_collection_initializer = true:suggestion
dotnet_style_explicit_tuple_names = true:suggestion
dotnet_style_null_propagation = true:suggestion
dotnet_style_coalesce_expression = true:suggestion
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion
dotnet_style_prefer_inferred_tuple_names = true:suggestion
dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion
dotnet_style_prefer_auto_properties = true:silent
dotnet_style_prefer_conditional_expression_over_assignment = true:silent
dotnet_style_prefer_conditional_expression_over_return = true:silent
# Naming conventions
# Constants
dotnet_naming_rule.constants_should_be_pascal_case.severity = suggestion
dotnet_naming_rule.constants_should_be_pascal_case.symbols = constants
dotnet_naming_rule.constants_should_be_pascal_case.style = pascal_case
dotnet_naming_symbols.constants.applicable_kinds = field, local
dotnet_naming_symbols.constants.applicable_accessibilities = *
dotnet_naming_symbols.constants.required_modifiers = const
dotnet_naming_style.pascal_case.capitalization = pascal_case
# Private constants with underscore
dotnet_naming_rule.private_constants_should_be_camel_case_with_underscore.severity = suggestion
dotnet_naming_rule.private_constants_should_be_camel_case_with_underscore.symbols = private_constants
dotnet_naming_rule.private_constants_should_be_camel_case_with_underscore.style = camel_case_with_underscore
dotnet_naming_symbols.private_constants.applicable_kinds = field
dotnet_naming_symbols.private_constants.applicable_accessibilities = private
dotnet_naming_symbols.private_constants.required_modifiers = const
dotnet_naming_style.camel_case_with_underscore.capitalization = camel_case
dotnet_naming_style.camel_case_with_underscore.required_prefix = _
# Static readonly fields
dotnet_naming_rule.static_readonly_should_be_pascal_case.severity = suggestion
dotnet_naming_rule.static_readonly_should_be_pascal_case.symbols = static_readonly
dotnet_naming_rule.static_readonly_should_be_pascal_case.style = pascal_case
dotnet_naming_symbols.static_readonly.applicable_kinds = field
dotnet_naming_symbols.static_readonly.applicable_accessibilities = *
dotnet_naming_symbols.static_readonly.required_modifiers = static, readonly
# Private static readonly with underscore
dotnet_naming_rule.private_static_readonly_should_be_camel_case_with_underscore.severity = suggestion
dotnet_naming_rule.private_static_readonly_should_be_camel_case_with_underscore.symbols = private_static_readonly
dotnet_naming_rule.private_static_readonly_should_be_camel_case_with_underscore.style = camel_case_with_underscore
dotnet_naming_symbols.private_static_readonly.applicable_kinds = field
dotnet_naming_symbols.private_static_readonly.applicable_accessibilities = private
dotnet_naming_symbols.private_static_readonly.required_modifiers = static, readonly
# Private fields
dotnet_naming_rule.private_fields_should_be_camel_case_with_underscore.severity = suggestion
dotnet_naming_rule.private_fields_should_be_camel_case_with_underscore.symbols = private_fields
dotnet_naming_rule.private_fields_should_be_camel_case_with_underscore.style = camel_case_with_underscore
dotnet_naming_symbols.private_fields.applicable_kinds = field
dotnet_naming_symbols.private_fields.applicable_accessibilities = private
# Interfaces
dotnet_naming_rule.interfaces_should_be_prefixed_with_i.severity = suggestion
dotnet_naming_rule.interfaces_should_be_prefixed_with_i.symbols = interfaces
dotnet_naming_rule.interfaces_should_be_prefixed_with_i.style = prefixed_with_i
dotnet_naming_symbols.interfaces.applicable_kinds = interface
dotnet_naming_style.prefixed_with_i.capitalization = pascal_case
dotnet_naming_style.prefixed_with_i.required_prefix = I
# Types
dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion
dotnet_naming_rule.types_should_be_pascal_case.symbols = types
dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case
dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
# Non-field members
dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion
dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members
dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case
dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
# Parameters
dotnet_naming_rule.parameters_should_be_camel_case.severity = suggestion
dotnet_naming_rule.parameters_should_be_camel_case.symbols = parameters
dotnet_naming_rule.parameters_should_be_camel_case.style = camel_case
dotnet_naming_symbols.parameters.applicable_kinds = parameter
dotnet_naming_style.camel_case.capitalization = camel_case
# Async methods
dotnet_naming_rule.async_methods_should_end_with_async.severity = suggestion
dotnet_naming_rule.async_methods_should_end_with_async.symbols = async_methods
dotnet_naming_rule.async_methods_should_end_with_async.style = ends_with_async
dotnet_naming_symbols.async_methods.applicable_kinds = method
dotnet_naming_symbols.async_methods.required_modifiers = async
dotnet_naming_style.ends_with_async.capitalization = pascal_case
dotnet_naming_style.ends_with_async.required_suffix = Async
# C# formatting rules
# New line preferences
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
csharp_new_line_between_query_expression_clauses = true
# Indentation preferences
csharp_indent_case_contents = true
csharp_indent_switch_labels = true
csharp_indent_labels = flush_left
# Space preferences
csharp_space_after_cast = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_around_binary_operators = before_and_after
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
# Wrapping preferences
csharp_preserve_single_line_statements = true
csharp_preserve_single_line_blocks = true

94
.github/workflows/ci.yml vendored Normal file

@ -0,0 +1,94 @@
name: CI/CD Pipeline
on:
push:
branches: [ main, develop ]
pull_request:
branches: [ main, develop ]
jobs:
build-and-test:
runs-on: ubuntu-latest
services:
postgres:
image: postgres:16
env:
POSTGRES_USER: test
POSTGRES_PASSWORD: test
POSTGRES_DB: swp_test
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore dependencies
run: dotnet restore
- name: Build
run: dotnet build --no-restore --configuration Release
- name: Run unit tests
run: dotnet test --no-build --configuration Release --filter "TestCategory!=Integration" --logger "trx;LogFileName=unit-test-results.trx" --settings Tests/.runsettings
- name: Run integration tests
run: dotnet test --no-build --configuration Release --filter "TestCategory=Integration" --logger "trx;LogFileName=integration-test-results.trx"
env:
TEST_DB_CONNECTION: "Host=localhost;Port=5432;Database=swp_test;Username=test;Password=test"
- name: Upload test results
uses: actions/upload-artifact@v4
if: always()
with:
name: test-results
path: |
**/TestResults/*.trx
**/TestResults/*.coverage
- name: Code Coverage Report
uses: irongut/CodeCoverageSummary@v1.3.0
with:
filename: '**/TestResults/*.coverage'
badge: true
format: markdown
hide_branch_rate: false
hide_complexity: true
indicators: true
output: both
thresholds: '60 80'
code-quality:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Install dotnet tools
run: |
dotnet tool install --global dotnet-format
dotnet tool install --global security-scan
- name: Check formatting
run: dotnet format --verify-no-changes
- name: Security scan
run: security-scan --project SWP.Core.sln
- name: Run code analysis
run: dotnet build /p:RunAnalyzersDuringBuild=true /p:TreatWarningsAsErrors=true

362
.gitignore vendored Normal file

@ -0,0 +1,362 @@
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these files may be visible to others.
*.azurePubxml
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment the next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
CDF_UpgradeLog*.xml
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml

136
CLAUDE.md Normal file

@ -0,0 +1,136 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Build and Development Commands
```bash
# Build the solution
dotnet build
# Build in release mode
dotnet build -c Release
# Clean build artifacts
dotnet clean
# Restore NuGet packages
dotnet restore
# Run a specific project (from project directory)
dotnet run
# Run all tests
dotnet test
# Run tests with detailed output
dotnet test --logger "console;verbosity=normal"
# Run only unit tests (excluding integration tests)
dotnet test --filter "TestCategory!=Integration"
# Run tests with coverage
dotnet test --settings Tests/.runsettings --collect:"XPlat Code Coverage"
```
## Architecture Overview
This is a .NET 9.0 solution with a modular architecture using Autofac for dependency injection. The codebase follows these key patterns:
### Core Project Structure (SWP.Core)
- **CommandQueries**: CQRS pattern implementation with ICommand interface
- **Configurations**: Multi-provider configuration system supporting JSON files and database-backed smart configuration
- **Database**: PostgreSQL data access layer using Insight.Database ORM with connection factory pattern
- **ModuleRegistry**: Autofac modules for dependency registration (Security, Telemetry, Seq Logging, Database)
- **Telemetry**: Application Insights integration with custom enrichers and Seq telemetry channel
- **SeqLogging**: Structured logging implementation with Seq API integration
### Database Project Structure (PlanTempus.Database)
- Database setup and configuration management
- Identity system setup (DDL)
- User management with DCL scripts for different user types (Application, Organization, DbAdmin)
- Tenant initialization
- Navigation and roles/permissions systems
### Key Architectural Patterns
1. **Repository Pattern**: Used for configuration management (IConfigurationRepository)
2. **Factory Pattern**: Database connection management (IDbConnectionFactory)
3. **Module Pattern**: Autofac modules organize dependencies by feature (see the sketch after this list)
4. **CQRS Elements**: Command pattern with correlation and transaction IDs
5. **Smart Configuration**: Database-backed configuration provider that integrates with .NET configuration system
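The module pattern above (item 3) is composed through Autofac's `ContainerBuilder`. A minimal sketch wiring up two of the modules from this commit, assuming a placeholder connection string and that `SecureTokenizer` has no unregistered constructor dependencies:
```csharp
using Autofac;
using SWP.Core;
using SWP.Core.Database.ModuleRegistry;
using SWP.Core.ModuleRegistry;

var builder = new ContainerBuilder();

// DbPostgreSqlModule requires a connection string; SecurityModule registers ISecureTokenizer.
builder.RegisterModule(new DbPostgreSqlModule { ConnectionString = "Host=localhost;Database=swp;Username=app;Password=***" });
builder.RegisterModule(new SecurityModule());

using var container = builder.Build();

// Resolve a service contributed by SecurityModule.
var tokenizer = container.Resolve<ISecureTokenizer>();

// Resolving IDatabaseOperations would additionally require the TelemetryClient
// registrations from the telemetry module, which are omitted here.
```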
### Technology Stack
- .NET 9.0
- PostgreSQL with Npgsql and Insight.Database ORM
- Autofac for dependency injection
- FluentValidation for validation
- Seq API for structured logging
- Application Insights for telemetry
- Sodium.Core for encryption
- Newtonsoft.Json for JSON processing
### Security Features
- SecureTokenizer service for token generation
- Multi-key encryption with MasterKey management
- Secure connection string handling
## Testing Structure
The solution includes a comprehensive test project `SWP.Core.X.TDD` using:
- **MSTest** as the test framework
- **Moq** for mocking dependencies
- **Shouldly** for fluent assertions
- **Coverlet** for code coverage
### Test Categories
- **Unit Tests**: Fast, isolated tests for individual components
- **Integration Tests**: Tests requiring database or external dependencies (marked with `[TestCategory("Integration")]`)
### Running Specific Tests
```bash
# Run tests for a specific class
dotnet test --filter "ClassName=SecureTokenizerTests"
# Run tests by category
dotnet test --filter "TestCategory=Integration"
# Run tests by name pattern
dotnet test --filter "Name~Token"
```
### Test Helpers
- `TestDataBuilder`: Factory methods for creating test data
- `TestFixtureBase`: Base class for test setup and teardown
### Known Test Issues
- Some tests require external dependencies (Seq logging server, PostgreSQL)
- Integration tests are marked with `[Ignore]` when external dependencies are not available
- Current SecureTokenizer implementation has null handling issues that need fixing
## Naming Conventions
The project follows a comprehensive naming convention documented in `NAMING_CONVENTION.md`. Key points:
### Code Style
- **Namespaces**: PascalCase with `SWP.Core` prefix (not `PlanTempus.Core`)
- **Classes**: PascalCase with appropriate suffixes (`Factory`, `Service`, `Exception`, etc.)
- **Interfaces**: PascalCase with `I` prefix
- **Methods**: PascalCase, async methods end with `Async`
- **Properties**: PascalCase
- **Private fields**: camelCase with `_` prefix
- **Parameters**: camelCase
### File Organization
- One main class per file
- File names match class names (PascalCase)
- Folder structure mirrors namespace hierarchy
### Test Naming
- Test classes: `[ClassName]Tests`
- Test methods: `[MethodName]_Should[Behavior]_[Condition]` (example below)
- Test projects: `[ProjectName].X.TDD`
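A minimal MSTest sketch illustrating the naming pattern, using Moq and Shouldly from the testing stack above; the test body is purely illustrative:
```csharp
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using Shouldly;
using SWP.Core;

[TestClass]
public class SecureTokenizerTests
{
    [TestMethod]
    public void VerifyToken_ShouldReturnTrue_WhenTokenMatchesWord()
    {
        // Illustrative only: a mocked ISecureTokenizer stands in for the real implementation.
        var tokenizer = new Mock<ISecureTokenizer>();
        tokenizer.Setup(t => t.VerifyToken("hash", "word")).Returns(true);

        var result = tokenizer.Object.VerifyToken("hash", "word");

        result.ShouldBeTrue();
    }
}
```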
### Configuration
- `.editorconfig` enforces formatting rules
- Hierarchical configuration keys with PascalCase sections
- Database uses snake_case, C# uses PascalCase with proper mapping

194
CODE_REVIEW_FINDINGS.md Normal file

@ -0,0 +1,194 @@
# Comprehensive Code Review - SWP.Core
## Overview
This document contains the findings of a comprehensive code review of the SWP.Core solution. The review identified several critical security issues, architectural challenges, and maintainability concerns.
## Critical Security Issues (Must be fixed IMMEDIATELY)
### 1. Hard-coded Master Key
**File:** `Core/MultiKeyEncryption/SecureConnectionString.cs:12`
**Problem:** Hard-coded encryption key in the source code
```csharp
const string _masterKey = "5AFD74B1C26E87FE6656099E850DC67A";
```
**Recommendation:** Implement proper key management via Azure Key Vault or similar
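A minimal sketch of one alternative, assuming the key is supplied through an environment variable named `SWP_MASTER_KEY` (illustrative); a secret-store client such as Azure Key Vault would slot into the same place:
```csharp
// Illustrative only: load the master key from the environment instead of source code.
// In production the value would typically come from a secret store (e.g. Azure Key Vault).
var masterKey = Environment.GetEnvironmentVariable("SWP_MASTER_KEY")
    ?? throw new InvalidOperationException("SWP_MASTER_KEY is not configured.");
```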
### 2. Fixed Salt for All Users
**File:** `Core/MultiKeyEncryption/SecureConnectionString.cs:52`
**Problem:** The same salt is used for all passwords
```csharp
new byte[16], // Fixed salt for simplicity - in production this should be unique per user
```
**Recommendation:** Generate a unique salt per user
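A minimal sketch of generating a unique, random salt per user instead of the shared `new byte[16]`; persisting the salt next to the derived hash is assumed but not shown:
```csharp
using System.Security.Cryptography;

// Generate a fresh 16-byte salt for each user and store it alongside the hash.
byte[] salt = RandomNumberGenerator.GetBytes(16);
```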
### 3. Timing Attack Vulnerability
**File:** `Core/SecureTokenizer.cs:41`
**Problem:** Uses `SequenceEqual()`, which is vulnerable to timing attacks
**Recommendation:** Implement a constant-time comparison
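A minimal sketch of the recommended constant-time comparison, using `CryptographicOperations.FixedTimeEquals` in place of `SequenceEqual()` (the helper name is illustrative):
```csharp
using System.Security.Cryptography;

// Compares two hashes without leaking timing information about where they differ.
static bool HashesMatch(byte[] expected, byte[] actual) =>
    CryptographicOperations.FixedTimeEquals(expected, actual);
```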
### 4. Weak Encryption Parameters
**Problems:**
- Only 10,000 PBKDF2 iterations (should be 100,000+)
- No authenticated encryption (AES-GCM or HMAC)
- Missing input validation
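A hedged sketch of strengthened parameters: PBKDF2 with 100,000 iterations over SHA-256 plus AES-GCM for authenticated encryption. The key size, nonce/tag handling, and iteration count are illustrative minimums, not values taken from this commit:
```csharp
using System.Security.Cryptography;

// Derive a 256-bit key with a per-user salt and a modern iteration count.
static byte[] DeriveKey(string password, byte[] salt) =>
    Rfc2898DeriveBytes.Pbkdf2(password, salt, iterations: 100_000, HashAlgorithmName.SHA256, outputLength: 32);

// Encrypt with AES-GCM so tampering is detected when decrypting.
static (byte[] Nonce, byte[] Ciphertext, byte[] Tag) Encrypt(byte[] key, byte[] plaintext)
{
    byte[] nonce = RandomNumberGenerator.GetBytes(AesGcm.NonceByteSizes.MaxSize); // 12 bytes
    byte[] ciphertext = new byte[plaintext.Length];
    byte[] tag = new byte[AesGcm.TagByteSizes.MaxSize];                           // 16 bytes
    using var aes = new AesGcm(key, tag.Length);
    aes.Encrypt(nonce, plaintext, ciphertext, tag);
    return (nonce, ciphertext, tag);
}
```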
## Architectural Issues
### 1. SOLID Principle Violations
#### Single Responsibility Principle (SRP)
- `SmartConfigProvider`: Handles file I/O, database queries, and JSON conversion
- `SqlOperations`: Mixes database operations with telemetry
- `SeqBackgroundService`: Handles both queue management and network communication
#### Open/Closed Principle (OCP)
- Hard-coded dependencies prevent extension
- Direct instantiation of concrete types in modules
#### Liskov Substitution Principle (LSP)
- `PostgresConnectionFactory` exposes PostgreSQL-specific details through a generic interface
#### Interface Segregation Principle (ISP)
- `IDatabaseOperations` forces implementation of both generic and non-generic methods
- `IConfigurationProvider` has too many responsibilities
#### Dependency Inversion Principle (DIP)
- High-level modules depend directly on low-level modules
- Missing abstraction for file-system operations
### 2. Incomplete Implementations
#### Command/Query Pattern
- Interfaces are defined, but the following are missing:
  - Command handlers
  - Query handlers
  - Mediator pattern
  - Validation pipeline
#### MasterKey Class
**File:** `Core/MultiKeyEncryption/MasterKey.cs`
- The entire class is commented out
- No functionality implemented
#### Telemetry Enricher
**File:** `Core/Telemetry/Enrichers/EnrichWithMetaTelemetry.cs`
- Empty implementation with the comment "nothing going on here yet"
### 3. Anti-Patterns
#### God Classes
- Configuration providers handle too many responsibilities
- Database connection factories mix concerns
#### Feature Envy
- Classes that access too much external state
- Missing encapsulation
#### Leaky Abstractions
- Database-specific details leak through generic interfaces
- PostgreSQL types exposed in abstractions
## Code Quality Issues
### 1. Inconsistent Naming
- Mix of Danish and English in comments
- Different naming conventions across modules
### 2. Missing Error Handling
- Exceptions swallowed without logging
- No retry policies for transient failures
- Missing input validation
### 3. Hard-coded Values
**Examples:**
- `CommandResponse.cs:41`: `StatusUrl = "statusUrl"`
- Security keys and connection strings
### 4. Resource Management
- Potential memory leaks in connection factories
- Missing proper disposal patterns in some places
### 5. Dependencies
- Sodium.Core is included but never used
- Potentially outdated package versions
## Database Project Issues
### 1. SQL Injection Risks
- Verify that all queries use parameterization correctly
- Dynamic SQL construction must be avoided
### 2. Missing Transaction Support
- No transaction management in the database abstraction layer
- Risk of inconsistent data
### 3. Connection Pooling
- No clear strategy for connection pooling
- Potential performance problems
## Missing Components
### 1. Caching Layer
- No caching abstraction
- Configuration is potentially reloaded too often
### 2. Health Checks
- No health check endpoints
- Missing monitoring capabilities
### 3. Metrics Collection
- No business metrics abstraction
- Only technical telemetry
### 4. Retry Policies
- No Polly or similar retry mechanisms
- Transient failures are not handled (see the sketch below)
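A minimal sketch of the suggested Polly-based retry around the existing `IDatabaseOperations.ExecuteAsync`; the exception filter and back-off values are illustrative:
```csharp
using Npgsql;
using Polly;
using SWP.Core.Database;

// Retry transient PostgreSQL failures three times with exponential back-off.
var retryPolicy = Policy
    .Handle<NpgsqlException>(ex => ex.IsTransient)
    .WaitAndRetryAsync(3, attempt => TimeSpan.FromSeconds(Math.Pow(2, attempt)));

// databaseOperations is an injected IDatabaseOperations instance (see SqlOperations).
await retryPolicy.ExecuteAsync(() =>
    databaseOperations.ExecuteAsync(conn => Task.CompletedTask /* query goes here */, "MyOperation"));
```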
## Test Issues
### 1. Test Coverage
- Many components have no tests
- Critical security components are not tested
### 2. Integration Tests
- Missing database integration tests
- No end-to-end tests
### 3. Test Patterns
- Inconsistent use of test patterns
- Missing test fixtures and builders
## Recommendations
### Immediate Actions (Critical)
1. Remove the hard-coded master key
2. Implement unique salts per user
3. Increase PBKDF2 iterations to 100,000+
4. Fix the timing attack vulnerability
5. Implement authenticated encryption
### Short-Term Improvements (1-2 weeks)
1. Refactor large classes (SRP)
2. Implement the missing command/query handlers
3. Add comprehensive error handling
4. Implement retry policies with Polly
5. Add input validation everywhere
### Long-Term Improvements (1-3 months)
1. Implement full CQRS with a mediator pattern
2. Add a caching layer
3. Implement health checks
4. Add business metrics
5. Create a comprehensive test suite
### Test Strategy
1. Unit tests for all critical components
2. Integration tests for the database layer
3. Security tests for the encryption code
4. Performance tests for connection pooling
5. End-to-end tests for complete flows
## Conclusion
The codebase shows promising architectural thinking but requires substantial refactoring to reach production-ready quality. The critical security issues must be addressed immediately, before the code is used in production.
Prioritize security fixes first, then architectural improvements, and finally general code quality and test coverage.


@ -0,0 +1,7 @@
namespace SWP.Core.CommandQueries;
public abstract class Command : ICommand
{
public required Guid CorrelationId { get; set; }
public Guid TransactionId { get; set; }
}


@ -0,0 +1,42 @@
namespace SWP.Core.CommandQueries;
/// <summary>
/// Represents a response to a command request
/// This class includes details such as a unique request ID, correlation ID, command name,
/// transaction ID, creation timestamp, and a URL to check the status of the command.
/// </summary>
/// <param name="correlationId">A unique identifier used to track the request across services.</param>
/// <param name="commandName">The name of the command being executed.</param>
/// <param name="transactionId">An optional unique identifier for the transaction associated with the command.</param>
public class CommandResponse(Guid correlationId, string commandName, Guid? transactionId)
{
/// <summary>
/// A unique identifier for the request. This is automatically generated using Guid.CreateVersion7().
/// </summary>
public Guid RequestId { get; } = Guid.CreateVersion7();
/// <summary>
/// A unique identifier used to track the request across services. This is provided when creating the response.
/// </summary>
public Guid CorrelationId { get; } = correlationId;
/// <summary>
/// The name of the command being executed.
/// </summary>
public string CommandName { get; } = commandName;
/// <summary>
/// An optional unique identifier for the transaction associated with the command.
/// </summary>
public Guid? TransactionId { get; } = transactionId;
/// <summary>
/// The timestamp when the command response was created. This is automatically set to the current UTC time.
/// </summary>
public DateTime CreatedAt { get; } = DateTime.UtcNow;
/// <summary>
/// A URL where the client can check the status of the command. This is typically used in asynchronous operations.
/// </summary>
public string StatusUrl { get; } = "statusUrl";
}
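A brief usage sketch for `CommandResponse`; the `CreateTenantCommand` type is hypothetical and shown only to illustrate the flow:
```csharp
using SWP.Core.CommandQueries;

// Hypothetical command type, shown only for illustration; Command is the abstract base above.
public sealed class CreateTenantCommand : Command { }

public static class CommandResponseExample
{
    public static CommandResponse Accept(CreateTenantCommand command) =>
        // RequestId and CreatedAt are generated automatically; StatusUrl is currently hard-coded.
        new CommandResponse(command.CorrelationId, nameof(CreateTenantCommand), command.TransactionId);
}
```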


@ -0,0 +1,7 @@
namespace SWP.Core.CommandQueries;
public interface ICommand
{
Guid CorrelationId { get; set; }
Guid TransactionId { get; set; }
}


@ -0,0 +1,56 @@
namespace SWP.Core.CommandQueries;
/// <summary>
/// Represents a standardized error response according to RFC 9457 (Problem Details for HTTP APIs).
/// This class provides a consistent way to communicate errors in HTTP APIs, including details about the error type,
/// status code, and additional context. It also supports extensions for custom error information.
///
/// RFC 9457 Documentation: https://www.rfc-editor.org/rfc/rfc9457.html
/// </summary>
public class ProblemDetails
{
/// <summary>
/// A URI reference that identifies the problem type. This is typically a link to human-readable documentation about the error.
/// </summary>
public string Type { get; set; }
/// <summary>
/// A short, human-readable summary of the problem. It should not change between occurrences of the same error.
/// </summary>
public string Title { get; set; }
/// <summary>
/// The HTTP status code generated by the server for this occurrence of the problem. This allows the client to understand the general category of the error.
/// </summary>
public int? Status { get; set; }
/// <summary>
/// A human-readable explanation specific to this occurrence of the problem. It provides additional details about the error.
/// </summary>
public string Detail { get; set; }
/// <summary>
/// A URI reference that identifies the specific occurrence of the problem. This can be used to trace the error in logs or debugging tools.
/// </summary>
public string Instance { get; set; }
/// <summary>
/// A dictionary for additional, custom error information. This allows extending the problem details with application-specific fields.
/// </summary>
[Newtonsoft.Json.JsonExtensionData]
public Dictionary<string, object> Extensions { get; } = new();
/// <summary>
/// Adds a custom extension to the problem details.
/// </summary>
/// <param name="key">The key for the extension.</param>
/// <param name="value">The value of the extension.</param>
public void AddExtension(string key, object value) => Extensions.Add(key, value);
/// <summary>
/// Removes a custom extension from the problem details.
/// </summary>
/// <param name="key">The key of the extension to remove.</param>
public void RemoveExtension(string key) => Extensions.Remove(key);
}
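A short usage sketch for the RFC 9457 `ProblemDetails` above; all field values are illustrative:
```csharp
using SWP.Core.CommandQueries;

// Describe a validation failure in RFC 9457 form and attach a custom extension.
var problem = new ProblemDetails
{
    Type = "https://example.com/problems/validation-error",
    Title = "Validation failed",
    Status = 400,
    Detail = "CorrelationId must be a non-empty GUID.",
    Instance = "/commands/create-tenant/123"
};
problem.AddExtension("correlationId", Guid.NewGuid());

// JsonExtensionData inlines the Extensions dictionary when serialized with Newtonsoft.Json.
var json = Newtonsoft.Json.JsonConvert.SerializeObject(problem);
```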


@ -0,0 +1,75 @@
using System.Collections.Generic;
using Newtonsoft.Json.Linq;
namespace SWP.Core.Configurations.Common
{
public static class KeyValueToJson
{
public static JObject Convert(IEnumerable<KeyValuePair<string, JToken>> pairs)
{
var root = new JObject();
foreach (var pair in pairs)
{
var keys = pair.Key.Split(':');
var current = root;
// Walk the key hierarchy and create intermediate objects if they do not exist
for (int i = 0; i < keys.Length - 1; i++)
{
var key = keys[i];
if (current[key] == null)
current[key] = new JObject();
current = (JObject)current[key];
}
// Handle the last key and add the value
var lastKey = keys[keys.Length - 1];
var value = ConvertValue(pair.Value);
// If the last key already exists, collect the values in an array
if (current[lastKey] != null)
// If it is already an array, append to it
if (current[lastKey].Type == JTokenType.Array)
((JArray)current[lastKey]).Add(value);
// If it is not an array yet, convert it to one
else
{
var existingValue = current[lastKey];
current[lastKey] = new JArray { existingValue, value };
}
// Otherwise add it as a single value
else
current[lastKey] = value;
}
return root;
}
private static JToken ConvertValue(object value)
{
// If the value is already a JToken, return it directly
if (value is JToken token)
return token;
// Convert other types
return value switch
{
int i => new JValue(i),
double d => new JValue(d),
bool b => new JValue(b),
string s => new JValue(s),
_ => new JValue(value.ToString())
};
}
}
}


@ -0,0 +1,152 @@
using Newtonsoft.Json.Linq;
namespace SWP.Core.Configurations
{
public interface IConfigurationBuilder
{
ConfigurationBuilder AddProvider(IConfigurationProvider provider);
IConfigurationRoot Build();
List<IConfigurationProvider> ConfigurationProviders { get; }
}
public class ConfigurationBuilder : IConfigurationBuilder
{
public List<IConfigurationProvider> ConfigurationProviders { get; private set; } = [];
public ConfigurationBuilder AddProvider(IConfigurationProvider provider)
{
((IConfigurationBuilder)this).ConfigurationProviders.Add(provider);
return this;
}
public IConfigurationRoot Build()
{
foreach (var provider in ConfigurationProviders)
provider.Build();
//TODO: we need to come up with a merge strategy; right now the latest key-path dominates
return new ConfigurationRoot(ConfigurationProviders);
}
}
public class Configuration : IConfiguration
{
List<IConfigurationProvider> _providers = [];
/// <summary>
/// Implements a string-based indexer for backwards compatibility with Microsoft.Extensions.Configuration.
/// This implementation is marked as obsolete and should be replaced with type-safe alternatives.
/// </summary>
/// <param name="key">The configuration key to retrieve.</param>
/// <returns>The configuration value for the specified key.</returns>
/// <exception cref="NotSupportedException">Thrown when attempting to set a value, as this operation is not supported.</exception>
[Obsolete("Use type-safe configuration methods instead")]
public string this[string key]
{
get => GetConfiguration(_providers, key);
set => throw new NotSupportedException();
}
List<IConfigurationProvider> IConfiguration.ConfigurationProviders
{
get { return _providers; }
set { _providers = value; }
}
internal static string GetConfiguration(IList<IConfigurationProvider> providers, string key)
{
string value = null;
foreach (var provider in providers)
{
var test = provider.Configuration().SelectToken(ConfigurationBinder.NormalizePath(key));
if (test != null)
value = test.ToString();
}
return value;
}
}
public class ConfigurationRoot : Configuration, IConfigurationRoot
{
public ConfigurationRoot(List<IConfigurationProvider> configurationProviders)
{
((IConfiguration)this).ConfigurationProviders = configurationProviders;
}
}
public static class ConfigurationBinder
{
public static string NormalizePath(string path)
{
return path?.Replace(":", ".", StringComparison.Ordinal) ?? string.Empty;
}
public static string GetConnectionString(this IConfigurationRoot configuration, string name)
{
return configuration.GetSection("ConnectionStrings").Get<string>(name);
}
public static IConfigurationSection GetSection(this IConfigurationRoot configuration, string path)
{
JToken value = null;
foreach (var provider in configuration.ConfigurationProviders)
{
var test = provider.Configuration().SelectToken(NormalizePath(path));
if (test != null)
value = test;
}
return new ConfigurationSection { Path = path, Key = path.Split(':').Last(), Value = value };
}
public static T Get<T>(this IConfigurationRoot configuration, string path)
{
JToken value = null;
foreach (var provider in configuration.ConfigurationProviders)
{
var test = provider.Configuration().SelectToken(NormalizePath(path));
if (test != null)
value = test;
}
return value.ToObject<T>();
}
public static T Get<T>(this IConfigurationSection configuration, string path)
{
var value = configuration.Value.SelectToken(NormalizePath(path)).ToObject<T>();
return value;
}
public static T ToObject<T>(this IConfigurationSection configuration)
{
var value = configuration.Value.ToObject<T>();
return value;
}
[Obsolete("Use ToObject")]
public static T Get<T>(this IConfigurationSection configuration)
{
return configuration.Value.ToObject<T>();
}
}
public interface IConfigurationProvider
{
void Build();
JObject Configuration();
}
public class ConfigurationSection : IConfigurationSection
{
public required string Path { get; set; }
public required string Key { get; set; }
public required JToken Value { get; set; }
}
public interface IConfigurationSection
{
string Path { get; }
string Key { get; }
JToken Value { get; set; }
}
}


@ -0,0 +1,9 @@
namespace SWP.Core.Configurations
{
/// <summary>
/// Marker interface for application configurations that should be automatically registered in the DI container.
/// Classes implementing this interface will be loaded from configuration and registered as singletons.
/// </summary>
public interface IAppConfiguration { }
}


@ -0,0 +1,10 @@
namespace SWP.Core.Configurations
{
public interface IConfigurationRoot : IConfiguration { }
public interface IConfiguration
{
internal List<IConfigurationProvider> ConfigurationProviders { get; set; }
string this[string key] { get; set; }
}
}


@ -0,0 +1,60 @@
using SWP.Core.Exceptions;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace SWP.Core.Configurations.JsonConfigProvider
{
public static class JsonConfigExtension
{
/// <summary>
/// Adds a JSON configuration source to the configuration builder.
/// </summary>
/// <param name="builder">The configuration builder to add to</param>
/// <param name="configurationFilePath">Path to the JSON configuration file. Defaults to "appconfiguration.json"</param>
/// <param name="optional">If true, the configuration file is optional. Defaults to true</param>
/// <param name="reloadOnChange">If true, the configuration will be reloaded when the file changes. Defaults to false</param>
/// <returns>The configuration builder</returns>
public static IConfigurationBuilder AddJsonFile(this IConfigurationBuilder builder, string configurationFilePath = "appconfiguration.json", bool? optional = true, bool? reloadOnChange = false)
{
return builder.AddProvider(new JsonConfigProvider(builder, configurationFilePath, optional ?? true, reloadOnChange ?? false));
}
}
public interface IHasConfigurationFilePath
{
string ConfigurationFilePath { get; }
}
public class JsonConfigProvider : IConfigurationProvider, IHasConfigurationFilePath
{
private readonly IConfigurationBuilder _builder;
private readonly bool _reloadOnChange;
JObject _configuration;
public string ConfigurationFilePath { get; private set; }
public JsonConfigProvider() { }
public JsonConfigProvider(IConfigurationBuilder builder, string configurationFilePath, bool optional, bool reloadOnChange)
{
if (!optional && !File.Exists(configurationFilePath))
throw new ConfigurationException($"File not found, path: {configurationFilePath}");
if (optional && !File.Exists(configurationFilePath))
return;
ConfigurationFilePath = configurationFilePath;
_builder = builder;
_reloadOnChange = reloadOnChange;
}
public void Build()
{
using (StreamReader file = File.OpenText(ConfigurationFilePath))
using (JsonTextReader reader = new JsonTextReader(file))
_configuration = (JObject)JToken.ReadFrom(reader);
}
public JObject Configuration()
{
return _configuration;
}
}
}


@ -0,0 +1,14 @@
namespace SWP.Core.Configurations.SmartConfigProvider;
public class AppConfiguration
{
public long Id { get; set; }
public string Key { get; set; }
public object Value { get; set; }
public string Label { get; set; }
public string ContentType { get; set; }
public DateTime? ValidFrom { get; set; }
public DateTime? ExpiresAt { get; set; }
public DateTime? CreatedAt { get; set; }
public DateTime? ModifiedAt { get; set; }
public Guid? Etag { get; set; }
}


@ -0,0 +1,7 @@
namespace SWP.Core.Configurations.SmartConfigProvider;
public interface IConfigurationRepository
{
string ConnectionString { get; set; }
IEnumerable<AppConfiguration> GetActiveConfigurations();
}


@ -0,0 +1,35 @@
using System.Data;
using Insight.Database;
using SWP.Core.Configurations.SmartConfigProvider;
namespace SWP.Core.Configurations.SmartConfigProvider.Repositories;
public class PostgresConfigurationRepository : IConfigurationRepository
{
private IDbConnection _connection;
public string ConnectionString { get; set; }
public PostgresConfigurationRepository(string connectionString)
{
_connection = new Npgsql.NpgsqlConnection(connectionString);
}
public PostgresConfigurationRepository()
{
}
public IEnumerable<AppConfiguration> GetActiveConfigurations()
{
_connection ??= new Npgsql.NpgsqlConnection(ConnectionString);
const string sql = @"
SELECT id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
FROM app_configuration
WHERE CURRENT_TIMESTAMP BETWEEN valid_from AND expires_at
OR (valid_from IS NULL AND expires_at IS NULL)";
return _connection.QuerySql<AppConfiguration>(sql);
}
}


@ -0,0 +1,33 @@
namespace SWP.Core.Configurations.SmartConfigProvider
{
/// <summary>
/// Extension methods for adding smart configuration providers to IConfigurationBuilder.
/// </summary>
public static class SmartConfigExtension
{
/// <summary>
/// Adds a smart configuration provider using a connection string from appsettings.
/// </summary>
/// <param name="builder">The configuration builder to add to</param>
/// <param name="configKey">The key to find the connection string in the ConnectionStrings section. Defaults to "DefaultConnection"</param>
/// <param name="path">Optional path to configuration file if different from default appsettings location</param>
/// <returns>The configuration builder</returns>
public static IConfigurationBuilder AddSmartConfig(this IConfigurationBuilder builder, string configKey = "DefaultConnection", string path = null)
{
return builder.AddProvider(new SmartConfigProvider(builder, configKey, path));
}
/// <summary>
/// Adds a smart configuration provider with custom configuration options.
/// </summary>
/// <param name="builder">The configuration builder to add to</param>
/// <param name="setupAction">Action to configure the smart configuration options</param>
/// <returns>The configuration builder</returns>
public static IConfigurationBuilder AddSmartConfig(this IConfigurationBuilder builder, Action<SmartConfigOptions> setupAction)
{
var options = new SmartConfigOptions();
setupAction(options);
return builder.AddProvider(new SmartConfigProvider(builder, options));
}
}
}
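A hedged usage sketch combining the JSON provider and the smart configuration provider from this commit; the file name, connection-string key, and `Seq:ServerUrl` key are illustrative:
```csharp
using SWP.Core.Configurations;
using SWP.Core.Configurations.JsonConfigProvider;
using SWP.Core.Configurations.SmartConfigProvider;

// appconfiguration.json supplies ConnectionStrings:DefaultConnection;
// the smart provider then layers database-backed settings on top of it.
IConfigurationRoot config = new ConfigurationBuilder()
    .AddJsonFile("appconfiguration.json")
    .AddSmartConfig(options => options.UsePostgres("DefaultConnection"))
    .Build();

var connectionString = config.GetConnectionString("DefaultConnection");
var seqUrl = config.Get<string>("Seq:ServerUrl");
```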


@ -0,0 +1,45 @@
namespace SWP.Core.Configurations.SmartConfigProvider
{
/// <summary>
/// Configuration options for setting up smart configuration providers.
/// Provides fluent configuration methods for specifying the repository type and settings.
/// </summary>
public class SmartConfigOptions
{
private IConfigurationRepository _repository;
internal string _configKey;
/// <summary>
/// Configures the smart configuration to use PostgreSQL as the configuration store.
/// </summary>
/// <param name="configKey">The configuration key used to find the connection string</param>
/// <returns>The configuration options instance for method chaining</returns>
public SmartConfigOptions UsePostgres(string configKey)
{
_configKey = configKey;
_repository = new Repositories.PostgresConfigurationRepository();
return this;
}
/// <summary>
/// Configures the smart configuration to use SQL Server as the configuration store.
/// </summary>
/// <returns>The configuration options instance for method chaining</returns>
/// <exception cref="NotImplementedException">This feature is not yet implemented</exception>
public SmartConfigOptions UseSqlServer()
{
throw new NotImplementedException();
}
/// <summary>
/// Configures the smart configuration to use a custom configuration repository.
/// </summary>
/// <param name="repository">The configuration repository to use</param>
/// <returns>The configuration options instance for method chaining</returns>
public SmartConfigOptions UseRepository(IConfigurationRepository repository)
{
_repository = repository;
return this;
}
internal IConfigurationRepository GetRepository() => _repository;
}
}


@ -0,0 +1,85 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using SWP.Core.Exceptions;
using SWP.Core.Configurations.JsonConfigProvider;
namespace SWP.Core.Configurations.SmartConfigProvider
{
/// <summary>
/// Configuration provider that loads configuration from a smart configuration source (e.g. database).
/// The provider reads connection details from a JSON file and uses them to connect to a configuration repository.
/// </summary>
/// <remarks>
/// The provider supports multiple initialization methods:
/// - Through SmartConfigOptions for flexible repository configuration
/// - Through direct configuration key and file path
/// Configuration is loaded from the repository during Build() and converted to a JSON structure.
/// </remarks>
public class SmartConfigProvider : IConfigurationProvider
{
string _configKey;
string _connectionString;
string _path;
IConfigurationBuilder _builder;
JObject _configuration;
SmartConfigOptions _smartConfigOptions;
public SmartConfigProvider() { }
public SmartConfigProvider(IConfigurationBuilder builder, SmartConfigOptions smartConfigOptions)
{
_builder = builder;
_smartConfigOptions = smartConfigOptions;
_configKey = smartConfigOptions._configKey;
SetConnectionString();
}
public SmartConfigProvider(IConfigurationBuilder builder, string configKey, string configurationFilePath)
{
_builder = builder;
_configKey = configKey;
_path = configurationFilePath;
SetConnectionString();
}
void SetConnectionString()
{
var carrier = _builder.ConfigurationProviders.OfType<IHasConfigurationFilePath>().SingleOrDefault();
if (carrier?.ConfigurationFilePath is null && _path is null)
throw new ConfigurationException($"Expected a previous added ConfigurationProvider with IHasConfigurationFilePath or a configurationFilePath where to find the appsettingsfile");
_path ??= carrier.ConfigurationFilePath;
if (!File.Exists(_path))
throw new ConfigurationException($"File not found, configurationFilePath: {_path}");
using (StreamReader file = File.OpenText(_path))
using (JsonTextReader reader = new JsonTextReader(file))
{
var jsonConfiguration = (JObject)JToken.ReadFrom(reader);
_connectionString = jsonConfiguration.SelectToken($"ConnectionStrings.{_configKey}")?.ToString();
}
}
public void Build()
{
var repository = _smartConfigOptions.GetRepository();
repository.ConnectionString = _connectionString;
var configs = repository.GetActiveConfigurations();
var pairs = configs.Select(x => new KeyValuePair<string, JToken>(x.Key, JToken.Parse(x.Value.ToString())));
_configuration = Common.KeyValueToJson.Convert(pairs);
}
public JObject Configuration()
{
return _configuration;
}
}
}


@ -0,0 +1,166 @@
using Insight.Database;
using System.Data;
using SWP.Core.Database.ConnectionFactory;
namespace SWP.Core.Database.ConfigurationManagementSystem;
public class SetupConfiguration(IDbConnectionFactory connectionFactory) : IDbConfigure<SetupConfiguration.Command>
{
public class Command { }
public void With(Command notInUse, ConnectionStringParameters parameters = null)
{
using var conn = parameters is null ? connectionFactory.Create() : connectionFactory.Create(parameters);
using var transaction = conn.OpenWithTransaction();
try
{
CreateConfigurationTable(conn);
CreateHistoryTable(conn);
CreateConfigurationIndexes(conn);
CreateModifiedAtTrigger(conn);
CreateNotifyTrigger(conn);
CreateHistoryTrigger(conn);
transaction.Commit();
}
catch (Exception ex)
{
transaction.Rollback();
throw new InvalidOperationException("Failed to SetupConfiguration in Database", ex);
}
}
void CreateConfigurationTable(IDbConnection db)
{
const string sql = @"
CREATE TABLE IF NOT EXISTS app_configuration (
id bigserial NOT NULL,
""key"" varchar(255) NOT NULL,
value text NULL,
""label"" varchar(255) NULL,
content_type varchar(255) DEFAULT 'text/plain'::character varying NULL,
valid_from timestamptz NULL,
expires_at timestamptz NULL,
created_at timestamptz DEFAULT CURRENT_TIMESTAMP NULL,
modified_at timestamptz DEFAULT CURRENT_TIMESTAMP NULL,
etag uuid DEFAULT gen_random_uuid() NULL,
CONSTRAINT app_configuration_pkey PRIMARY KEY (id)
);";
db.ExecuteSql(sql);
}
void CreateHistoryTable(IDbConnection db)
{
const string sql = @"
CREATE TABLE IF NOT EXISTS app_configuration_history (
history_id bigserial NOT NULL,
action_type char(1) NOT NULL,
action_timestamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
action_by text NOT NULL DEFAULT CURRENT_USER,
id bigint NOT NULL,
""key"" varchar(255) NOT NULL,
value text NULL,
""label"" varchar(255) NULL,
content_type varchar(255) NULL,
valid_from timestamptz NULL,
expires_at timestamptz NULL,
created_at timestamptz NULL,
modified_at timestamptz NULL,
etag uuid NULL,
CONSTRAINT app_configuration_history_pkey PRIMARY KEY (history_id)
);";
db.ExecuteSql(sql);
}
void CreateConfigurationIndexes(IDbConnection db)
{
const string sql = @"
CREATE INDEX IF NOT EXISTS idx_app_configuration_key ON app_configuration(""key"");
CREATE INDEX IF NOT EXISTS idx_app_configuration_validity ON app_configuration(valid_from, expires_at);";
db.ExecuteSql(sql);
}
void CreateModifiedAtTrigger(IDbConnection db)
{
const string sql = @"
CREATE OR REPLACE FUNCTION update_app_configuration_modified_at()
RETURNS TRIGGER AS $$
BEGIN
NEW.modified_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE TRIGGER trg_app_configuration_modified_at
BEFORE UPDATE ON app_configuration
FOR EACH ROW
EXECUTE FUNCTION update_app_configuration_modified_at();";
db.ExecuteSql(sql);
}
void CreateNotifyTrigger(IDbConnection db)
{
const string sql = @"
CREATE OR REPLACE FUNCTION notify_app_configuration_change()
RETURNS TRIGGER AS $$
BEGIN
PERFORM pg_notify('config_changes', NEW.key);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE TRIGGER trg_app_configuration_notify
AFTER INSERT OR UPDATE ON app_configuration
FOR EACH ROW
EXECUTE FUNCTION notify_app_configuration_change();";
db.ExecuteSql(sql);
}
void CreateHistoryTrigger(IDbConnection db)
{
const string sql = @"
CREATE OR REPLACE FUNCTION log_app_configuration_changes()
RETURNS TRIGGER AS $$
BEGIN
IF (TG_OP = 'INSERT') THEN
INSERT INTO app_configuration_history (
action_type, id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
)
VALUES (
'I', NEW.id, NEW.key, NEW.value, NEW.label, NEW.content_type,
NEW.valid_from, NEW.expires_at, NEW.created_at, NEW.modified_at, NEW.etag
);
ELSIF (TG_OP = 'UPDATE') THEN
INSERT INTO app_configuration_history (
action_type, id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
)
VALUES (
'U', OLD.id, OLD.key, OLD.value, OLD.label, OLD.content_type,
OLD.valid_from, OLD.expires_at, OLD.created_at, OLD.modified_at, OLD.etag
);
ELSIF (TG_OP = 'DELETE') THEN
INSERT INTO app_configuration_history (
action_type, id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
)
VALUES (
'D', OLD.id, OLD.key, OLD.value, OLD.label, OLD.content_type,
OLD.valid_from, OLD.expires_at, OLD.created_at, OLD.modified_at, OLD.etag
);
END IF;
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE TRIGGER trg_app_configuration_history
AFTER INSERT OR UPDATE OR DELETE ON app_configuration
FOR EACH ROW EXECUTE FUNCTION log_app_configuration_changes();";
db.ExecuteSql(sql);
}
}


@ -0,0 +1,8 @@
namespace SWP.Core.Database.ConnectionFactory
{
public interface IDbConnectionFactory
{
System.Data.IDbConnection Create();
System.Data.IDbConnection Create(ConnectionStringParameters connectionStringTemplateParameters);
}
}


@ -0,0 +1,65 @@
using System.Data;
using Npgsql;
namespace SWP.Core.Database.ConnectionFactory
{
public record ConnectionStringParameters(string User, string Pwd);
public class PostgresConnectionFactory : IDbConnectionFactory, IAsyncDisposable
{
private readonly NpgsqlDataSource _baseDataSource;
private readonly Action<NpgsqlDataSourceBuilder> _configureDataSource;
private readonly Microsoft.Extensions.Logging.ILoggerFactory _loggerFactory; // not tested or fully wired up yet; added as an idea
public PostgresConnectionFactory(
string connectionString,
Microsoft.Extensions.Logging.ILoggerFactory loggerFactory = null,
Action<NpgsqlDataSourceBuilder> configureDataSource = null)
{
_loggerFactory = loggerFactory;
_configureDataSource = configureDataSource ?? (builder => { });
// Create the base data source with the supplied configuration
var dataSourceBuilder = new NpgsqlDataSourceBuilder(connectionString);
ConfigureDataSourceBuilder(dataSourceBuilder);
_baseDataSource = dataSourceBuilder.Build();
}
public IDbConnection Create()
{
return _baseDataSource.CreateConnection();
}
public IDbConnection Create(ConnectionStringParameters param)
{
var connectionStringBuilder = new NpgsqlConnectionStringBuilder(
_baseDataSource.ConnectionString)
{
Username = param.User,
Password = param.Pwd
};
var tempDataSourceBuilder = new NpgsqlDataSourceBuilder(
connectionStringBuilder.ToString());
ConfigureDataSourceBuilder(tempDataSourceBuilder);
var tempDataSource = tempDataSourceBuilder.Build();
return tempDataSource.CreateConnection();
}
private void ConfigureDataSourceBuilder(NpgsqlDataSourceBuilder builder)
{
if (_loggerFactory != null)
builder.UseLoggerFactory(_loggerFactory);
_configureDataSource?.Invoke(builder);
}
public async ValueTask DisposeAsync()
{
await _baseDataSource.DisposeAsync();
}
}
}


@ -0,0 +1,38 @@
using System.Data;
using System.Diagnostics;
using Microsoft.ApplicationInsights.DataContracts;
using Microsoft.ApplicationInsights.Extensibility;
namespace SWP.Core.Database;
public class DatabaseScope : IDisposable
{
internal readonly IOperationHolder<DependencyTelemetry> _operation;
private readonly Stopwatch _stopwatch;
public DatabaseScope(IDbConnection connection, IOperationHolder<DependencyTelemetry> operation)
{
Connection = connection;
_operation = operation;
_operation.Telemetry.Success = true;
_operation.Telemetry.Timestamp = DateTimeOffset.UtcNow;
_stopwatch = Stopwatch.StartNew();
}
public IDbConnection Connection { get; }
public void Dispose()
{
_stopwatch.Stop();
_operation.Telemetry.Duration = _stopwatch.Elapsed;
_operation.Dispose();
Connection.Dispose();
}
public void Error(Exception ex)
{
_operation.Telemetry.Success = false;
_operation.Telemetry.Properties["Error"] = ex.Message;
}
}


@ -0,0 +1,10 @@
using System.Data;
namespace SWP.Core.Database;
public interface IDatabaseOperations
{
DatabaseScope CreateScope(string operationName);
Task<T> ExecuteAsync<T>(Func<IDbConnection, Task<T>> operation, string operationName);
Task ExecuteAsync(Func<IDbConnection, Task> operation, string operationName);
}


@ -0,0 +1,9 @@
using SWP.Core.Database.ConnectionFactory;
namespace SWP.Core.Database
{
public interface IDbConfigure<T>
{
void With(T command, ConnectionStringParameters parameters = null);
}
}


@ -0,0 +1,25 @@
using Autofac;
using SWP.Core.Database.ConnectionFactory;
namespace SWP.Core.Database.ModuleRegistry
{
public class DbPostgreSqlModule : Module
{
public required string ConnectionString { get; set; }
protected override void Load(ContainerBuilder builder)
{
Insight.Database.Providers.PostgreSQL.PostgreSQLInsightDbProvider.RegisterProvider();
builder.RegisterType<PostgresConnectionFactory>()
.As<IDbConnectionFactory>()
.WithParameter(new TypedParameter(typeof(string), ConnectionString))
.SingleInstance();
builder.RegisterType<SqlOperations>()
.As<IDatabaseOperations>();
}
}
}

View file

@ -0,0 +1,57 @@
using System.Data;
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.DataContracts;
using SWP.Core.Database.ConnectionFactory;
namespace SWP.Core.Database;
public class SqlOperations : IDatabaseOperations
{
private readonly IDbConnectionFactory _connectionFactory;
private readonly TelemetryClient _telemetryClient;
public SqlOperations(IDbConnectionFactory connectionFactory, TelemetryClient telemetryClient)
{
_connectionFactory = connectionFactory;
_telemetryClient = telemetryClient;
}
public DatabaseScope CreateScope(string operationName)
{
var connection = _connectionFactory.Create();
var operation = _telemetryClient.StartOperation<DependencyTelemetry>(operationName);
operation.Telemetry.Type = "SQL";
operation.Telemetry.Target = "PostgreSQL";
return new DatabaseScope(connection, operation);
}
public async Task<T> ExecuteAsync<T>(Func<IDbConnection, Task<T>> operation, string operationName)
{
using var scope = CreateScope(operationName);
try
{
var result = await operation(scope.Connection);
return result;
}
catch (Exception ex)
{
scope.Error(ex);
throw;
}
}
public async Task ExecuteAsync(Func<IDbConnection, Task> operation, string operationName)
{
using var scope = CreateScope(operationName);
try
{
await operation(scope.Connection);
}
catch (Exception ex)
{
scope.Error(ex);
throw;
}
}
}

View file

@ -0,0 +1,35 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SWP.Core.Entities.Users
{
public class User
{
public int Id { get; set; }
public string Email { get; set; }
public string PasswordHash { get; set; }
public string SecurityStamp { get; set; }
public bool EmailConfirmed { get; set; }
public DateTime CreatedDate { get; set; }
public DateTime? LastLoginDate { get; set; }
}
public class Organization
{
public int Id { get; set; }
public string ConnectionString { get; set; }
public DateTime CreatedDate { get; set; }
public int CreatedBy { get; set; }
public bool IsActive { get; set; }
}
public class UserOrganization
{
public int UserId { get; set; }
public int OrganizationId { get; set; }
public DateTime CreatedDate { get; set; }
}
}

View file

@ -0,0 +1,9 @@
namespace SWP.Core.Exceptions
{
internal class ConfigurationException : Exception
{
public ConfigurationException(string message) : base(message)
{
}
}
}

8
Core/ISecureTokenizer.cs Normal file
View file

@ -0,0 +1,8 @@
namespace SWP.Core
{
public interface ISecureTokenizer
{
string TokenizeText(string word);
bool VerifyToken(string hash, string word);
}
}

View file

@ -0,0 +1,14 @@
using Autofac;
using SWP.Core.SeqLogging;
namespace SWP.Core.ModuleRegistry
{
public class SecurityModule : Module
{
protected override void Load(ContainerBuilder builder)
{
builder.RegisterType<SecureTokenizer>()
.As<ISecureTokenizer>();
}
}
}

View file

@ -0,0 +1,31 @@
using Autofac;
using SWP.Core.SeqLogging;
namespace SWP.Core.ModuleRegistry
{
public class SeqLoggingModule : Module
{
public required SeqConfiguration SeqConfiguration { get; set; }
protected override void Load(ContainerBuilder builder)
{
//builder.RegisterType<MessageChannel>()
// .As<IMessageChannel<Microsoft.ApplicationInsights.Channel.ITelemetry>>()
// .SingleInstance();
builder.RegisterType<SeqBackgroundService>()
//.As<Microsoft.Extensions.Hosting.IHostedService>()
.SingleInstance();
builder.RegisterGeneric(typeof(SeqLogger<>));
builder.RegisterInstance(SeqConfiguration);
builder.RegisterType<SeqHttpClient>()
.As<SeqHttpClient>()
.SingleInstance();
}
}
}

View file

@ -0,0 +1,50 @@
using Autofac;
using Microsoft.ApplicationInsights.Channel;
using Microsoft.ApplicationInsights.Extensibility;
using SWP.Core.Telemetry;
namespace SWP.Core.ModuleRegistry
{
public class TelemetryModule : Module
{
public required TelemetryConfig TelemetryConfig { get; set; }
protected override void Load(ContainerBuilder builder)
{
var configuration = TelemetryConfiguration.CreateDefault();
configuration.ConnectionString = TelemetryConfig.ConnectionString;
configuration.TelemetryChannel.DeveloperMode = true;
var client = new Microsoft.ApplicationInsights.TelemetryClient(configuration);
client.Context.GlobalProperties["Application"] = GetType().Namespace?.Split('.')[0];
client.Context.GlobalProperties["MachineName"] = Environment.MachineName;
client.Context.GlobalProperties["CLRVersion"] = Environment.Version.ToString();
client.Context.GlobalProperties["ProcessorCount"] = Environment.ProcessorCount.ToString();
builder.Register(c => client).InstancePerLifetimeScope();
if (TelemetryConfig.UseSeqLoggingTelemetryChannel)
{
var messageChannel = new MessageChannel();
builder.RegisterInstance(messageChannel)
.As<IMessageChannel<ITelemetry>>()
.SingleInstance();
configuration.TelemetryChannel = new SeqTelemetryChannel(messageChannel, client);
}
var telemetryProcessorChain =
new Microsoft.ApplicationInsights.Extensibility.Implementation.TelemetryProcessorChainBuilder(
configuration);
telemetryProcessorChain.Use(next => new Telemetry.Enrichers.EnrichWithMetaTelemetry(next));
telemetryProcessorChain.Build();
}
}
public class TelemetryConfig
{
public string ConnectionString { get; set; }
public bool UseSeqLoggingTelemetryChannel { get; set; }
}
}

View file

@ -0,0 +1,28 @@
namespace SWP.Core.MultiKeyEncryption
{
internal class MasterKey
{
public async Task RotateMasterKey(int tenantId, string oldMasterKey, string newMasterKey)
{
await Task.CompletedTask;
// Fetch all user keys for the tenant
//var users = await GetTenantUsers(tenantId);
//// Decrypt the connection string with the old master key
//var connString = DecryptWithKey(encryptedConnString, oldMasterKey);
//// Encrypt with the new master key
//var newEncryptedConnString = EncryptWithKey(connString, newMasterKey);
//// Re-encrypt the master key for all users
//foreach (var user in users)
//{
// var userKey = DeriveKeyFromPassword(user.Password);
// var newEncryptedMasterKey = EncryptWithKey(newMasterKey, userKey);
// await UpdateUserMasterKey(user.UserId, newEncryptedMasterKey);
//}
//await UpdateTenantConnectionString(tenantId, newEncryptedConnString);
}
}
}

View file

@ -0,0 +1,98 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace SWP.Core.MultiKeyEncryption
{
public class SecureConnectionString
{
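// Example usage (illustrative only; the connection string, user name and password are placeholders):
//   var secure = new SecureConnectionString();
//   var data = secure.EncryptConnectionString("Host=...;Database=...");
//   var encryptedMasterKey = secure.AddNewUser("alice", "p@ssw0rd");
//   var connectionString = secure.Decrypt(data.EncryptedConnectionString, encryptedMasterKey, "p@ssw0rd");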
const string _masterKey = "5AFD74B1C26E87FE6656099E850DC67A";
public class EncryptedData
{
public string EncryptedConnectionString { get; set; }
public Dictionary<string, string> UserMasterKeys { get; set; } = new();
}
public EncryptedData EncryptConnectionString(string connectionString)
{
var encryptedConnString = EncryptWithKey(connectionString, _masterKey);
var userKeys = new Dictionary<string, string>();
return new EncryptedData
{
EncryptedConnectionString = encryptedConnString,
UserMasterKeys = userKeys
};
}
public string AddNewUser(string username, string password)
{
var userKey = DeriveKeyFromPassword(password);
var encryptedMasterKey = EncryptWithKey(_masterKey, userKey);
return encryptedMasterKey;
}
public string Decrypt(string encryptedConnString, string encryptedMasterKey, string password)
{
var userKey = DeriveKeyFromPassword(password);
var masterKey = DecryptWithKey(encryptedMasterKey, userKey);
return DecryptWithKey(encryptedConnString, masterKey);
}
private string DeriveKeyFromPassword(string password)
{
using var deriveBytes = new Rfc2898DeriveBytes(
password,
new byte[16], // Fixed salt for simplicity - in production this should be unique per user
10000,
HashAlgorithmName.SHA256);
return Convert.ToBase64String(deriveBytes.GetBytes(32));
}
private string EncryptWithKey(string value, string key)
{
using var aes = Aes.Create();
var keyBytes = Convert.FromBase64String(key);
aes.Key = keyBytes;
aes.GenerateIV();
using var encryptor = aes.CreateEncryptor();
var valueBytes = Encoding.UTF8.GetBytes(value);
var encrypted = encryptor.TransformFinalBlock(valueBytes, 0, valueBytes.Length);
var result = new byte[aes.IV.Length + encrypted.Length];
Array.Copy(aes.IV, 0, result, 0, aes.IV.Length);
Array.Copy(encrypted, 0, result, aes.IV.Length, encrypted.Length);
return Convert.ToBase64String(result);
}
private string DecryptWithKey(string encryptedValue, string key)
{
var encryptedBytes = Convert.FromBase64String(encryptedValue);
using var aes = Aes.Create();
var keyBytes = Convert.FromBase64String(key);
aes.Key = keyBytes;
var iv = new byte[16];
Array.Copy(encryptedBytes, 0, iv, 0, iv.Length);
aes.IV = iv;
using var decryptor = aes.CreateDecryptor();
var decrypted = decryptor.TransformFinalBlock(
encryptedBytes,
iv.Length,
encryptedBytes.Length - iv.Length);
return Encoding.UTF8.GetString(decrypted);
}
}
}

28
Core/SWP.Core.csproj Normal file
View file

@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Autofac" Version="8.1.1"/>
<PackageReference Include="Autofac.Extensions.DependencyInjection" Version="10.0.0"/>
<PackageReference Include="FluentValidation" Version="11.11.0"/>
<PackageReference Include="Insight.Database" Version="8.0.1"/>
<PackageReference Include="Insight.Database.Providers.PostgreSQL" Version="8.0.1"/>
<PackageReference Include="Microsoft.ApplicationInsights" Version="2.22.0"/>
<PackageReference Include="Microsoft.ApplicationInsights.WindowsServer.TelemetryChannel" Version="2.22.0"/>
<PackageReference Include="Microsoft.AspNetCore.Mvc" Version="2.3.0"/>
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="9.0.1"/>
<PackageReference Include="npgsql" Version="9.0.2"/>
<PackageReference Include="Seq.Api" Version="2024.3.0"/>
<PackageReference Include="Sodium.Core" Version="1.3.5"/>
</ItemGroup>
<ItemGroup>
<Folder Include="Configurations\AzureAppConfigurationProvider\"/>
<Folder Include="Configurations\PostgresqlConfigurationBuilder\"/>
</ItemGroup>
</Project>

45
Core/SecureTokenizer.cs Normal file
View file

@ -0,0 +1,45 @@
namespace SWP.Core
{
public class SecureTokenizer : ISecureTokenizer
{
private const int _saltSize = 16; // 128 bit
private const int _keySize = 32; // 256 bit
private const int _iterations = 100000;
public string TokenizeText(string word)
{
using (var algorithm = new System.Security.Cryptography.Rfc2898DeriveBytes(
word,
_saltSize,
_iterations,
System.Security.Cryptography.HashAlgorithmName.SHA256))
{
var key = Convert.ToBase64String(algorithm.GetBytes(_keySize));
var salt = Convert.ToBase64String(algorithm.Salt);
return $"{_iterations}.{salt}.{key}";
}
}
public bool VerifyToken(string hash, string word)
{
var parts = hash.Split('.', 3);
if (parts.Length != 3)
return false;
var iterations = Convert.ToInt32(parts[0]);
var salt = Convert.FromBase64String(parts[1]);
var key = Convert.FromBase64String(parts[2]);
using (var algorithm = new System.Security.Cryptography.Rfc2898DeriveBytes(
word,
salt,
iterations,
System.Security.Cryptography.HashAlgorithmName.SHA256))
{
var keyToCheck = algorithm.GetBytes(_keySize);
return keyToCheck.SequenceEqual(key);
}
}
}
}

View file

@ -0,0 +1,86 @@
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.Channel;
using Microsoft.ApplicationInsights.DataContracts;
using Microsoft.Extensions.Hosting;
using SWP.Core.Telemetry;
namespace SWP.Core.SeqLogging
{
public class SeqBackgroundService : BackgroundService
{
private readonly IMessageChannel<ITelemetry> _messageChannel;
private readonly TelemetryClient _telemetryClient;
private readonly SeqLogger<SeqBackgroundService> _seqLogger;
public SeqBackgroundService(TelemetryClient telemetryClient,
IMessageChannel<ITelemetry> messageChannel,
SeqLogger<SeqBackgroundService> seqlogger)
{
_telemetryClient = telemetryClient;
_messageChannel = messageChannel;
_seqLogger = seqlogger;
_telemetryClient.TrackTrace("SeqBackgroundService started");
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
try
{
while (!stoppingToken.IsCancellationRequested)
await foreach (var telemetry in _messageChannel.Reader.ReadAllAsync(stoppingToken))
try
{
switch (telemetry)
{
case ExceptionTelemetry et:
await _seqLogger.LogAsync(et);
break;
case TraceTelemetry et:
await _seqLogger.LogAsync(et);
break;
case DependencyTelemetry et:
await _seqLogger.LogAsync(et);
break;
case RequestTelemetry et:
await _seqLogger.LogAsync(et);
break;
case EventTelemetry et:
await _seqLogger.LogAsync(et);
break;
default:
throw new NotSupportedException(telemetry.GetType().Name);
}
}
catch
{
throw;
//_telemetryClient.TrackException(ex); this is disabled for now, we need to think about the channel structure first
}
}
catch (Exception ex)
{
if (ex is not OperationCanceledException)
{
_telemetryClient.TrackException(ex);
throw;
}
}
}
public override async Task StopAsync(CancellationToken cancellationToken)
{
_telemetryClient.TrackTrace("StopAsync called: Service shutdown started");
_messageChannel.Dispose();
await base.StopAsync(cancellationToken);
}
}
}

View file

@ -0,0 +1,4 @@
namespace SWP.Core.SeqLogging
{
public record SeqConfiguration(string IngestionEndpoint, string ApiKey, string Environment);
}

View file

@ -0,0 +1,28 @@
namespace SWP.Core.SeqLogging
{
public class SeqHttpClient
{
private readonly HttpClient _httpClient;
public SeqHttpClient(SeqConfiguration seqConfiguration, HttpMessageHandler httpMessageHandler)
{
_httpClient = new HttpClient(httpMessageHandler)
{
BaseAddress = new Uri(seqConfiguration.IngestionEndpoint),
Timeout = TimeSpan.FromSeconds(30)
};
_httpClient.DefaultRequestHeaders.Accept.Clear();
_httpClient.DefaultRequestHeaders.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));
if (seqConfiguration.ApiKey != null)
_httpClient.DefaultRequestHeaders.Add("X-Seq-ApiKey", seqConfiguration.ApiKey);
}
public SeqHttpClient(SeqConfiguration seqConfiguration) : this(seqConfiguration, new HttpClientHandler()) { }
public async Task<HttpResponseMessage> SendAsync(HttpRequestMessage httpRequestMessage, CancellationToken cancellationToken)
{
return await _httpClient.SendAsync(httpRequestMessage, cancellationToken);
}
}
}

View file

@ -0,0 +1,236 @@
using System.Text;
using Microsoft.ApplicationInsights.DataContracts;
namespace SWP.Core.SeqLogging
{
public class SeqLogger<T>
{
private readonly SeqHttpClient _httpClient;
private readonly SeqConfiguration _configuration;
public SeqLogger(SeqHttpClient httpClient, SeqConfiguration configuration)
{
_httpClient = httpClient;
_configuration = configuration;
}
public async Task LogAsync(TraceTelemetry trace, CancellationToken cancellationToken = default)
{
var seqEvent = new Dictionary<string, object>
{
{ "@t", trace.Timestamp.UtcDateTime.ToString("o") },
{ "@mt", trace.Message },
{ "@l", MapSeverityToLevel(trace.SeverityLevel) },
{ "Environment", _configuration.Environment },
};
foreach (var prop in trace.Properties)
seqEvent.Add($"prop_{prop.Key}", prop.Value);
foreach (var prop in trace.Context.GlobalProperties)
seqEvent.Add($"global_{prop.Key}", prop.Value);
await SendToSeqAsync(seqEvent, cancellationToken);
}
public async Task LogAsync(EventTelemetry evt, CancellationToken cancellationToken = default)
{
var seqEvent = new Dictionary<string, object>
{
{ "@t", evt.Timestamp.UtcDateTime.ToString("o") },
{ "@mt", evt.Name },
{ "@l", "Information" },
{ "Environment", _configuration.Environment }
};
foreach (var prop in evt.Properties)
seqEvent.Add($"prop_{prop.Key}", prop.Value);
foreach (var prop in evt.Context.GlobalProperties)
seqEvent.Add($"global_{prop.Key}", prop.Value);
foreach (var metric in evt.Metrics)
seqEvent.Add($"metric_{metric.Key}", metric.Value);
await SendToSeqAsync(seqEvent, cancellationToken);
}
public async Task LogAsync(ExceptionTelemetry ex, CancellationToken cancellationToken = default)
{
var seqEvent = new Dictionary<string, object>
{
{ "@t", ex.Timestamp.UtcDateTime.ToString("o") },
{ "@mt", ex.Exception.Message },
{ "@l", "Error" },
{ "@x", FormatExceptionForSeq(ex.Exception) },
{ "Environment", _configuration.Environment },
{ "ExceptionType", ex.Exception.GetType().Name }
};
foreach (var prop in ex.Properties)
seqEvent.Add($"prop_{prop.Key}", prop.Value);
foreach (var prop in ex.Context.GlobalProperties)
seqEvent.Add($"global_{prop.Key}", prop.Value);
await SendToSeqAsync(seqEvent, cancellationToken);
}
public async Task LogAsync(DependencyTelemetry dep, CancellationToken cancellationToken = default)
{
var seqEvent = new Dictionary<string, object>
{
{ "@t", dep.Timestamp.UtcDateTime.ToString("o") },
{ "@mt", $"Dependency: {dep.Name}" },
{ "@l", dep.Success ?? true ? "Information" : "Error" },
{ "Environment", _configuration.Environment },
{ "DependencyType", dep.Type },
{ "Target", dep.Target },
{ "Duration", dep.Duration.TotalMilliseconds }
};
foreach (var prop in dep.Properties)
seqEvent.Add($"prop_{prop.Key}", prop.Value);
foreach (var prop in dep.Context.GlobalProperties)
seqEvent.Add($"global_{prop.Key}", prop.Value);
await SendToSeqAsync(seqEvent, cancellationToken);
}
public async Task LogAsync(RequestTelemetry req, CancellationToken cancellationToken = default)
{
await Task.CompletedTask;
throw new NotImplementedException();
}
public async Task LogAsync(
Microsoft.ApplicationInsights.Extensibility.IOperationHolder<RequestTelemetry> operationHolder,
CancellationToken cancellationToken = default)
{
var req = operationHolder.Telemetry;
//https://docs.datalust.co/v2025.1/docs/posting-raw-events
var seqEvent = new Dictionary<string, object>
{
{ "@t", req.Timestamp.UtcDateTime.ToString("o") },
{ "@mt", req.Name },
{ "@l", req.Success ?? true ? "Information" : "Error" },
{ "@sp", req.Id }, //Span id Unique identifier of a span Yes, if the event is a span
{
"@tr", req.Context.Operation.Id
}, //Trace id An identifier that groups all spans and logs that are in the same trace Yes, if the event is a span
{
"@sk", "Server"
}, //Span kind Describes the relationship of the span to others in the trace: Client, Server, Internal, Producer, or Consumer
{
"@st", req.Timestamp.UtcDateTime.Subtract(req.Duration).ToString("o")
}, //Start The start ISO 8601 timestamp of this span Yes, if the event is a span
{ "SourceContext", typeof(T).FullName },
{ "Url", req.Url },
{ "RequestId", req.Id },
{ "ItemTypeFlag", req.ItemTypeFlag.ToString() }
};
if (!string.IsNullOrEmpty(req.ResponseCode))
if (int.TryParse(req.ResponseCode, out int statusCode))
if (Enum.IsDefined(typeof(System.Net.HttpStatusCode), statusCode))
seqEvent["StatusCode"] = $"{statusCode} {(System.Net.HttpStatusCode)statusCode}";
else
seqEvent["StatusCode"] = $"{statusCode} Unknown";
if (!string.IsNullOrEmpty(req.Context.Operation.ParentId))
seqEvent["@ps"] = req.Context.Operation.ParentId;
if (req.Properties.TryGetValue("httpMethod", out string method))
{
seqEvent["RequestMethod"] = method;
seqEvent["@mt"] = $"{req.Properties["httpMethod"]} {req.Name}";
req.Properties.Remove("httpMethod");
}
foreach (var prop in req.Properties)
seqEvent.Add($"prop_{prop.Key}", prop.Value);
foreach (var prop in req.Context.GlobalProperties)
seqEvent.Add($"{prop.Key}", prop.Value);
await SendToSeqAsync(seqEvent, cancellationToken);
}
private async Task SendToSeqAsync(Dictionary<string, object> seqEvent, CancellationToken cancellationToken)
{
var content = new StringContent(
Newtonsoft.Json.JsonConvert.SerializeObject(seqEvent),
Encoding.UTF8,
"application/vnd.serilog.clef");
var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/ingest/clef")
{
Content = content
};
var result = await _httpClient.SendAsync(requestMessage, cancellationToken);
result.EnsureSuccessStatusCode();
}
private static string MapSeverityToLevel(SeverityLevel? severity)
{
return severity switch
{
SeverityLevel.Verbose => "Verbose",
SeverityLevel.Information => "Information",
SeverityLevel.Warning => "Warning",
SeverityLevel.Error => "Error",
SeverityLevel.Critical => "Fatal",
_ => "Information"
};
}
private static string FormatExceptionForSeq(Exception ex)
{
var sb = new StringBuilder();
var exceptionCount = 0;
void FormatSingleException(Exception currentEx, int depth)
{
if (depth > 0) sb.AppendLine("\n--- Inner Exception ---");
sb.AppendLine($"Exception Type: {currentEx.GetType().FullName}");
sb.AppendLine($"Message: {currentEx.Message}");
sb.AppendLine($"Source: {currentEx.Source}");
sb.AppendLine($"HResult: 0x{currentEx.HResult:X8}");
sb.AppendLine("Stack Trace:");
sb.AppendLine(currentEx.StackTrace?.Trim());
if (currentEx.Data.Count > 0)
{
sb.AppendLine("Additional Data:");
foreach (var key in currentEx.Data.Keys)
sb.AppendLine($" {key}: {currentEx.Data[key]}");
}
}
void RecurseExceptions(Exception currentEx, int depth = 0)
{
if (currentEx is AggregateException aggEx)
foreach (var inner in aggEx.InnerExceptions)
{
RecurseExceptions(inner, depth);
depth++;
}
else if (currentEx.InnerException != null)
RecurseExceptions(currentEx.InnerException, depth + 1);
FormatSingleException(currentEx, depth);
exceptionCount++;
}
RecurseExceptions(ex);
sb.Insert(0, $"EXCEPTION CHAIN ({exceptionCount} exceptions):\n");
return sb.ToString();
}
}
}

View file

@ -0,0 +1,14 @@
using Microsoft.ApplicationInsights.Channel;
using Microsoft.ApplicationInsights.Extensibility;
namespace SWP.Core.Telemetry.Enrichers
{
public class EnrichWithMetaTelemetry(ITelemetryProcessor next) : ITelemetryProcessor
{
public void Process(ITelemetry item)
{
//nothing going on here yet :)
next.Process(item);
}
}
}

View file

@ -0,0 +1,9 @@
using System.Threading.Channels;
namespace SWP.Core.Telemetry
{
public interface IMessageChannel<T> : IDisposable
{
ChannelWriter<T> Writer { get; }
ChannelReader<T> Reader { get; }
}
}

View file

@ -0,0 +1,23 @@
using Microsoft.ApplicationInsights.Channel;
using System.Threading.Channels;
namespace SWP.Core.Telemetry
{
public class MessageChannel : IMessageChannel<ITelemetry>
{
private readonly Channel<ITelemetry> _channel;
public MessageChannel()
{
_channel = Channel.CreateUnbounded<ITelemetry>();
}
public ChannelWriter<ITelemetry> Writer => _channel.Writer;
public ChannelReader<ITelemetry> Reader => _channel.Reader;
public void Dispose()
{
_channel.Writer.Complete();
}
}
}

View file

@ -0,0 +1,36 @@
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.Channel;
namespace SWP.Core.Telemetry
{
public class SeqTelemetryChannel(IMessageChannel<ITelemetry> messageChannel, TelemetryClient telemetryClient)
: InMemoryChannel, ITelemetryChannel
{
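// ITelemetryChannel is re-listed in the base list so that interface dispatch is re-mapped to the
// hiding Send below; without it, calls through ITelemetryChannel would reach InMemoryChannel.Send instead.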
public new void Send(ITelemetry telemetry)
{
if (telemetry.Context.GlobalProperties.TryGetValue("OmitSeqTelemetryChannel", out var value))
if (value == "true")
{
base.Send(telemetry);
return;
}
try
{
var writeTask = messageChannel.Writer.WriteAsync(telemetry).AsTask();
writeTask.ContinueWith(t =>
{
if (t.Exception != null)
throw t.Exception;
}, TaskContinuationOptions.OnlyOnFaulted);
}
catch (Exception e)
{
telemetryClient.TrackException(e,
new Dictionary<string, string> { { "OmitSeqTelemetryChannel", "true" } });
}
base.Send(telemetry);
}
}
}

View file

@ -0,0 +1,12 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace SWP.Core.Telemetry;
public static class TelemetryExtensions
{
public static Dictionary<string, string> Format(this object obj)
{
return new Dictionary<string, string> { { "Object", JObject.FromObject(obj).ToString() } };
}
}

View file

@ -0,0 +1,13 @@
using System.Text.RegularExpressions;
namespace PlanTempus.Database.Common
{
internal class Validations
{
public static bool IsValidSchemaName(string schema)
{
return !string.IsNullOrEmpty(schema) && Regex.IsMatch(schema, "^[a-zA-Z0-9_]+$");
}
}
}

View file

@ -0,0 +1,167 @@
using Insight.Database;
using PlanTempus.Database.Core;
using System.Data;
using PlanTempus.Core.Database.ConnectionFactory;
namespace PlanTempus.Database.ConfigurationManagementSystem;
public class SetupConfiguration(IDbConnectionFactory connectionFactory) : IDbConfigure<SetupConfiguration.Command>
{
public class Command { }
public void With(Command notInUse, ConnectionStringParameters parameters = null)
{
using var conn = parameters is null ? connectionFactory.Create() : connectionFactory.Create(parameters);
using var transaction = conn.OpenWithTransaction();
try
{
CreateConfigurationTable(conn);
CreateHistoryTable(conn);
CreateConfigurationIndexes(conn);
CreateModifiedAtTrigger(conn);
CreateNotifyTrigger(conn);
CreateHistoryTrigger(conn);
transaction.Commit();
}
catch (Exception ex)
{
transaction.Rollback();
throw new InvalidOperationException("Failed to SetupConfiguration in Database", ex);
}
}
void CreateConfigurationTable(IDbConnection db)
{
const string sql = @"
CREATE TABLE IF NOT EXISTS app_configuration (
id bigserial NOT NULL,
""key"" varchar(255) NOT NULL,
value text NULL,
""label"" varchar(255) NULL,
content_type varchar(255) DEFAULT 'text/plain'::character varying NULL,
valid_from timestamptz NULL,
expires_at timestamptz NULL,
created_at timestamptz DEFAULT CURRENT_TIMESTAMP NULL,
modified_at timestamptz DEFAULT CURRENT_TIMESTAMP NULL,
etag uuid DEFAULT gen_random_uuid() NULL,
CONSTRAINT app_configuration_pkey PRIMARY KEY (id)
);";
db.ExecuteSql(sql);
}
void CreateHistoryTable(IDbConnection db)
{
const string sql = @"
CREATE TABLE IF NOT EXISTS app_configuration_history (
history_id bigserial NOT NULL,
action_type char(1) NOT NULL,
action_timestamp timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
action_by text NOT NULL DEFAULT CURRENT_USER,
id bigint NOT NULL,
""key"" varchar(255) NOT NULL,
value text NULL,
""label"" varchar(255) NULL,
content_type varchar(255) NULL,
valid_from timestamptz NULL,
expires_at timestamptz NULL,
created_at timestamptz NULL,
modified_at timestamptz NULL,
etag uuid NULL,
CONSTRAINT app_configuration_history_pkey PRIMARY KEY (history_id)
);";
db.ExecuteSql(sql);
}
void CreateConfigurationIndexes(IDbConnection db)
{
const string sql = @"
CREATE INDEX IF NOT EXISTS idx_app_configuration_key ON app_configuration(""key"");
CREATE INDEX IF NOT EXISTS idx_app_configuration_validity ON app_configuration(valid_from, expires_at);";
db.ExecuteSql(sql);
}
void CreateModifiedAtTrigger(IDbConnection db)
{
const string sql = @"
CREATE OR REPLACE FUNCTION update_app_configuration_modified_at()
RETURNS TRIGGER AS $$
BEGIN
NEW.modified_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE TRIGGER trg_app_configuration_modified_at
BEFORE UPDATE ON app_configuration
FOR EACH ROW
EXECUTE FUNCTION update_app_configuration_modified_at();";
db.ExecuteSql(sql);
}
void CreateNotifyTrigger(IDbConnection db)
{
const string sql = @"
CREATE OR REPLACE FUNCTION notify_app_configuration_change()
RETURNS TRIGGER AS $$
BEGIN
PERFORM pg_notify('config_changes', NEW.key);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE TRIGGER trg_app_configuration_notify
AFTER INSERT OR UPDATE ON app_configuration
FOR EACH ROW
EXECUTE FUNCTION notify_app_configuration_change();";
db.ExecuteSql(sql);
}
void CreateHistoryTrigger(IDbConnection db)
{
const string sql = @"
CREATE OR REPLACE FUNCTION log_app_configuration_changes()
RETURNS TRIGGER AS $$
BEGIN
IF (TG_OP = 'INSERT') THEN
INSERT INTO app_configuration_history (
action_type, id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
)
VALUES (
'I', NEW.id, NEW.key, NEW.value, NEW.label, NEW.content_type,
NEW.valid_from, NEW.expires_at, NEW.created_at, NEW.modified_at, NEW.etag
);
ELSIF (TG_OP = 'UPDATE') THEN
INSERT INTO app_configuration_history (
action_type, id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
)
VALUES (
'U', OLD.id, OLD.key, OLD.value, OLD.label, OLD.content_type,
OLD.valid_from, OLD.expires_at, OLD.created_at, OLD.modified_at, OLD.etag
);
ELSIF (TG_OP = 'DELETE') THEN
INSERT INTO app_configuration_history (
action_type, id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
)
VALUES (
'D', OLD.id, OLD.key, OLD.value, OLD.label, OLD.content_type,
OLD.valid_from, OLD.expires_at, OLD.created_at, OLD.modified_at, OLD.etag
);
END IF;
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE TRIGGER trg_app_configuration_history
AFTER INSERT OR UPDATE OR DELETE ON app_configuration
FOR EACH ROW EXECUTE FUNCTION log_app_configuration_changes();";
db.ExecuteSql(sql);
}
}

View file

@ -0,0 +1,106 @@
using System.Data;
using Insight.Database;
using PlanTempus.Core.Database.ConnectionFactory;
using PlanTempus.Database.Common;
namespace PlanTempus.Database.Core.DCL
{
/// <summary>
/// Only a superadmin or similar can create Application Users
/// </summary>
public class SetupApplicationUser : IDbConfigure<SetupApplicationUser.Command>
{
public class Command
{
public required string Schema { get; init; }
public required string User { get; init; }
public required string Password { get; init; }
}
Command _command;
private readonly IDbConnectionFactory _connectionFactory;
public SetupApplicationUser(IDbConnectionFactory connectionFactory)
{
_connectionFactory = connectionFactory;
}
public void With(Command command, ConnectionStringParameters parameters = null)
{
_command = command;
if (!Validations.IsValidSchemaName(_command.Schema))
throw new ArgumentException($"Invalid schema name: {_command.Schema}", nameof(command));
using var conn = parameters is null ? _connectionFactory.Create() : _connectionFactory.Create(parameters);
using var transaction = conn.OpenWithTransaction();
try
{
CreateSchema(conn);
CreateRole(conn);
GrantSchemaRights(conn);
transaction.Commit();
}
catch (Exception ex)
{
transaction.Rollback();
throw new InvalidOperationException("Failed to SetupApplicationUser in Database", ex);
}
}
private void CreateSchema(IDbConnection db)
{
var sql = $"CREATE SCHEMA IF NOT EXISTS {_command.Schema}";
db.ExecuteSql(sql);
}
private void CreateRole(IDbConnection db)
{
var sql = $@"
DO $$
BEGIN
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = '{_command.User}') THEN
CREATE ROLE {_command.User} WITH CREATEDB CREATEROLE LOGIN PASSWORD '{_command.Password}';
END IF;
END $$;";
db.ExecuteSql(sql);
var sql1 = $"ALTER ROLE {_command.User} SET search_path='{_command.Schema}';";
db.ExecuteSql(sql1);
}
private void GrantSchemaRights(IDbConnection db)
{
// Grant USAGE and all CREATE rights at the schema level
var sql = $@"
GRANT USAGE ON SCHEMA {_command.Schema} TO {_command.User};
GRANT ALL ON SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql);
// Grant rights on existing and future tables
var sql1 = $"GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql1);
var sql2 = $"ALTER DEFAULT PRIVILEGES IN SCHEMA {_command.Schema} GRANT ALL PRIVILEGES ON TABLES TO {_command.User};";
db.ExecuteSql(sql2);
// Grant sequence rights
var sql3 = $"GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql3);
// Grant execute on functions
var sql4 = $"GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql4);
// Grant for future functions
var sql5 = $"ALTER DEFAULT PRIVILEGES IN SCHEMA {_command.Schema} GRANT EXECUTE ON FUNCTIONS TO {_command.User};";
db.ExecuteSql(sql5);
// Grant for future sequences
var sql6 = $"ALTER DEFAULT PRIVILEGES IN SCHEMA {_command.Schema} GRANT USAGE ON SEQUENCES TO {_command.User};";
db.ExecuteSql(sql6);
}
}
}

View file

@ -0,0 +1,89 @@
using System.Data;
using Insight.Database;
using PlanTempus.Core.Database.ConnectionFactory;
using PlanTempus.Database.Common;
namespace PlanTempus.Database.Core.DCL
{
/// <summary>
/// Only a superadmin or similar can create database admin roles
/// </summary>
public class SetupDbAdmin : IDbConfigure<SetupDbAdmin.Command>
{
public class Command
{
public required string Schema { get; init; }
public required string User { get; init; }
public required string Password { get; init; }
}
Command _command;
private readonly IDbConnectionFactory _connectionFactory;
public SetupDbAdmin(IDbConnectionFactory connectionFactory)
{
_connectionFactory = connectionFactory;
}
public void With(Command command, ConnectionStringParameters parameters = null)
{
_command = command;
if (!Validations.IsValidSchemaName(_command.Schema))
throw new ArgumentException($"Invalid schema name: {_command.Schema}", nameof(command));
using var conn = parameters is null ? _connectionFactory.Create() : _connectionFactory.Create(parameters);
using var transaction = conn.OpenWithTransaction();
try
{
CreateSchema(conn);
CreateRole(conn);
GrantSchemaRights(conn);
transaction.Commit();
}
catch (Exception ex)
{
transaction.Rollback();
throw new InvalidOperationException("Failed to SetupApplicationUser in Database", ex);
}
}
private void CreateSchema(IDbConnection db)
{
var sql = $"CREATE SCHEMA IF NOT EXISTS {_command.Schema}";
db.ExecuteSql(sql);
}
private void CreateRole(IDbConnection db)
{
var sql = $@"
DO $$
BEGIN
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = '{_command.User}') THEN
CREATE ROLE {_command.User} WITH CREATEDB CREATEROLE LOGIN PASSWORD '{_command.Password}';
END IF;
END $$;";
db.ExecuteSql(sql);
var sql1 = $"ALTER ROLE {_command.User} SET search_path='{_command.Schema}';";
db.ExecuteSql(sql1);
var sql2 = $"ALTER SCHEMA {_command.Schema} OWNER TO {_command.User};";
db.ExecuteSql(sql2);
}
private void GrantSchemaRights(IDbConnection db)
{
var sql = $@"GRANT CREATE ON SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql);
}
}
}

View file

@ -0,0 +1,89 @@
using System.Data;
using Insight.Database;
using PlanTempus.Core.Database.ConnectionFactory;
using PlanTempus.Database.Common;
using PlanTempus.Database.Core;
namespace PlanTempus.Database.Core.DCL
{
public class SetupOrganization : IDbConfigure<SetupOrganization.Command>
{
public class Command
{
public required string Schema { get; init; }
public required string User { get; init; }
public required string Password { get; init; }
}
Command _command;
private readonly IDbConnectionFactory _connectionFactory;
public SetupOrganization(IDbConnectionFactory connectionFactory)
{
_connectionFactory = connectionFactory;
}
public void With(Command command, ConnectionStringParameters parameters = null)
{
_command = command;
if (!Validations.IsValidSchemaName(_command.Schema))
throw new ArgumentException($"Invalid schema name: {_command.Schema}", nameof(command));
using var conn = parameters is null ? _connectionFactory.Create() : _connectionFactory.Create(parameters);
using var transaction = conn.OpenWithTransaction();
try
{
CreateSchema(conn);
CreateRole(conn);
GrantSchemaRights(conn);
transaction.Commit();
}
catch (Exception ex)
{
transaction.Rollback();
throw new InvalidOperationException("Failed to SetupOrganization in Database", ex);
}
}
private void CreateSchema(IDbConnection db)
{
var sql = $"CREATE SCHEMA IF NOT EXISTS {_command.Schema}";
db.ExecuteSql(sql);
}
private void CreateRole(IDbConnection db)
{
var sql = $"CREATE ROLE {_command.User} LOGIN PASSWORD '{_command.Password}';";
db.ExecuteSql(sql);
var sql1 = $"ALTER ROLE {_command.User} SET search_path='{_command.Schema}';";
db.ExecuteSql(sql1);
}
private void GrantSchemaRights(IDbConnection db)
{
var sql = $"GRANT USAGE ON SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql);
var sql1 = $"ALTER DEFAULT PRIVILEGES IN SCHEMA {_command.Schema} " +
$"GRANT INSERT, SELECT, UPDATE PRIVILEGES ON TABLES TO {_command.User};";
db.ExecuteSql(sql1);
var sql2 = $"GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql2);
var sql3 = $"GRANT CREATE TABLE ON SCHEMA {_command.Schema} TO {_command.User};";
db.ExecuteSql(sql3);
}
public void RevokeCreateTable(IDbConnection db)
{
var sql = $"REVOKE CREATE TABLE ON SCHEMA {_command.Schema} FROM {_command.User};";
db.ExecuteSql(sql);
}
}
}

View file

@ -0,0 +1,140 @@
using Insight.Database;
using System.Data;
using PlanTempus.Core.Database.ConnectionFactory;
namespace PlanTempus.Database.Core.DDL
{
/// <summary>
/// This is intentionally not async/await.
/// It is intended to be executed as the correct Application User.
/// </summary>
public class SetupIdentitySystem : IDbConfigure<SetupIdentitySystem.Command>
{
public class Command
{
public required string Schema { get; init; }
}
Command _command;
private readonly IDbConnectionFactory _connectionFactory;
public SetupIdentitySystem(IDbConnectionFactory connectionFactory)
{
_connectionFactory = connectionFactory;
}
/// <summary>
/// Creates the system tables in the specified schema within a transaction.
/// </summary>
/// <param name="schema">The schema name where the tables will be created.</param>
public void With(Command command, ConnectionStringParameters parameters = null)
{
_command = command;
using var conn = parameters is null ? _connectionFactory.Create() : _connectionFactory.Create(parameters);
using var transaction = conn.OpenWithTransaction();
try
{
CreateUsersTable(conn);
CreateOrganizationsTable(conn);
CreateUserOrganizationsTable(conn);
SetupRLS(conn);
transaction.Commit();
}
catch (Exception ex)
{
transaction.Rollback();
throw new InvalidOperationException("Failed to SetupIdentitySystem. Transaction is rolled back", ex);
}
}
/// <summary>
/// Creates the users table
/// </summary>
void CreateUsersTable(IDbConnection db)
{
var sql = @$"
CREATE TABLE IF NOT EXISTS {_command.Schema}.users (
id SERIAL PRIMARY KEY,
email VARCHAR(256) NOT NULL UNIQUE,
password_hash VARCHAR(256) NOT NULL,
security_stamp VARCHAR(36) NOT NULL,
email_confirmed BOOLEAN NOT NULL DEFAULT FALSE,
access_failed_count INTEGER NOT NULL DEFAULT 0,
lockout_enabled BOOLEAN NOT NULL DEFAULT TRUE,
lockout_end TIMESTAMPTZ NULL,
is_active BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_login_at TIMESTAMPTZ NULL
);";
db.ExecuteSql(sql);
}
/// <summary>
/// Creates the organizations table
/// </summary>
void CreateOrganizationsTable(IDbConnection db)
{
var sql = @$"
CREATE TABLE IF NOT EXISTS {_command.Schema}.organizations (
id SERIAL PRIMARY KEY,
connection_string VARCHAR(500) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);";
db.ExecuteSql(sql);
}
/// <summary>
/// Creates the user_organizations table
/// </summary>
void CreateUserOrganizationsTable(IDbConnection db)
{
var sql = @$"
CREATE TABLE IF NOT EXISTS {_command.Schema}.user_organizations (
user_id INTEGER NOT NULL REFERENCES {_command.Schema}.users(id),
organization_id INTEGER NOT NULL REFERENCES {_command.Schema}.organizations(id),
pin_code VARCHAR(10) NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (user_id, organization_id)
);";
db.ExecuteSql(sql);
}
/// <summary>
/// Sets up Row Level Security (RLS) for the organizations and user_organizations tables.
/// </summary>
void SetupRLS(IDbConnection db)
{
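// Note: these policies assume the application sets the session variable app.user_id
// (e.g. SELECT set_config('app.user_id', '<user id>', false);) before querying these tables.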
var sql = new[]
{
$"ALTER TABLE {_command.Schema}.organizations ENABLE ROW LEVEL SECURITY;",
$"ALTER TABLE {_command.Schema}.user_organizations ENABLE ROW LEVEL SECURITY;",
$"DROP POLICY IF EXISTS organization_access ON {_command.Schema}.organizations;",
@$"CREATE POLICY organization_access ON {_command.Schema}.organizations
USING (id IN (
SELECT organization_id
FROM {_command.Schema}.user_organizations
WHERE user_id = current_setting('app.user_id', TRUE)::INTEGER
)) WITH CHECK (true);",
$"DROP POLICY IF EXISTS user_organization_access ON {_command.Schema}.user_organizations;",
@$"CREATE POLICY user_organization_access ON {_command.Schema}.user_organizations
USING (user_id = current_setting('app.user_id', TRUE)::INTEGER) WITH CHECK (true);"
};
foreach (var statement in sql)
{
db.ExecuteSql(statement);
}
}
}
}

View file

@ -0,0 +1,9 @@
using PlanTempus.Core.Database.ConnectionFactory;
namespace PlanTempus.Database.Core
{
public interface IDbConfigure<T>
{
void With(T command, ConnectionStringParameters parameters = null);
}
}

View file

@ -0,0 +1,79 @@
using Insight.Database;
using PlanTempus.Core;
using PlanTempus.Core.Entities.Users;
using System.Data;
namespace PlanTempus.Database.Core
{
public class UserService
{
public record UserCreateCommand(string CorrelationId, string Email, string Password);
private readonly IDbConnection _db;
public UserService(IDbConnection db)
{
_db = db;
}
public async Task CreateUser(UserCreateCommand command)
{
var user = new User
{
Email = command.Email,
PasswordHash = new SecureTokenizer().TokenizeText(command.Password),
SecurityStamp = Guid.NewGuid().ToString(),
EmailConfirmed = false,
CreatedDate = DateTime.UtcNow
};
var userId = await _db.ExecuteScalarAsync<int>(@$"
INSERT INTO users (email, password_hash, security_stamp, email_confirmed, created_at)
VALUES (@Email, @PasswordHash, @SecurityStamp, @EmailConfirmed, @CreatedDate)
RETURNING id", user);
}
public async Task CreateOrganization(int userId, string organizationConnectionString)
{
var schema = "dev";
using var transaction = _db.OpenWithTransaction();
try
{
// Create organization
var organization = new Organization
{
ConnectionString = organizationConnectionString,
CreatedDate = DateTime.UtcNow,
CreatedBy = userId,
IsActive = true
};
var organizationId = await _db.ExecuteScalarAsync<int>(@$"
INSERT INTO {schema}.organizations (connection_string, created_at, is_active)
VALUES (@ConnectionString, @CreatedDate, @IsActive)
RETURNING id", organization);
// Link user to organization
var userOrganization = new UserOrganization
{
UserId = userId,
OrganizationId = organizationId,
CreatedDate = DateTime.UtcNow
};
await _db.ExecuteAsync(@$"
INSERT INTO {schema}.user_organizations (user_id, organization_id, created_at)
VALUES (@UserId, @OrganizationId, @CreatedDate)", userOrganization);
transaction.Commit();
}
catch
{
transaction.Rollback();
throw;
}
}
}
}

View file

@ -0,0 +1,26 @@
using Autofac;
using PlanTempus.Core.Database;
using PlanTempus.Core.Database.ConnectionFactory;
namespace PlanTempus.Database.ModuleRegistry
{
public class DbPostgreSqlModule : Module
{
public required string ConnectionString { get; set; }
protected override void Load(ContainerBuilder builder)
{
Insight.Database.Providers.PostgreSQL.PostgreSQLInsightDbProvider.RegisterProvider();
builder.RegisterType<PostgresConnectionFactory>()
.As<IDbConnectionFactory>()
.WithParameter(new TypedParameter(typeof(string), ConnectionString))
.SingleInstance();
builder.RegisterType<SqlOperations>()
.As<IDatabaseOperations>();
}
}
}

View file

@ -0,0 +1,50 @@
using Insight.Database;
using System.Data;
namespace PlanTempus.Database.NavigationSystem
{
internal class Setup
{
private readonly IDbConnection _db;
public Setup(IDbConnection db)
{
_db = db;
}
public void CreateSystem()
{
//await CreateNavigationLinkTemplatesTable(schema);
//await CreateNavigationLinkTemplateTranslationsTable(schema);
}
private async Task CreateNavigationLinkTemplatesTable()
{
var sql = $@"
CREATE TABLE IF NOT EXISTS navigation_link_templates (
id SERIAL PRIMARY KEY,
parent_id INTEGER NULL,
url VARCHAR(500) NOT NULL,
permission_id INTEGER NULL,
icon VARCHAR(100) NULL,
default_order INTEGER NOT NULL,
FOREIGN KEY (permission_id) REFERENCES permissions(id),
FOREIGN KEY (parent_id) REFERENCES navigation_link_templates(id)
)";
await _db.ExecuteAsync(sql);
}
private async Task CreateNavigationLinkTemplateTranslationsTable(string schema)
{
var sql = $@"
CREATE TABLE IF NOT EXISTS navigation_link_template_translations (
id SERIAL PRIMARY KEY,
template_id INTEGER NOT NULL,
language VARCHAR(10) NOT NULL,
display_name VARCHAR(100) NOT NULL,
FOREIGN KEY (template_id) REFERENCES navigation_link_templates(id)
)";
await _db.ExecuteAsync(sql);
}
}
}

View file

@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Core\PlanTempus.Core.csproj" />
</ItemGroup>
<ItemGroup>
<Folder Include="AuditSystem\" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,96 @@
using System.Data;
using Insight.Database;
namespace PlanTempus.Database.RolesPermissionSystem
{
/// <summary>
/// This is intentionally not async/await.
/// It is intended to be run as the correct Application User, which is why the schema name is omitted.
/// </summary>
public class Setup
{
private readonly IDbConnection _db;
public Setup(IDbConnection db)
{
_db = db;
}
/// <summary>
/// Creates the system tables in the current schema (resolved via the role's search_path) within a transaction.
/// </summary>
public void CreateSystem()
{
//if (!Validations.IsValidSchemaName(_schema))
// throw new ArgumentException("Invalid schema name", _schema);
using var transaction = _db.BeginTransaction();
try
{
CreateRolesTable();
CreatePermissionsTable();
CreatePermissionTypesTable();
CreateRolePermissionsTable();
transaction.Commit();
}
catch (Exception ex)
{
transaction.Rollback();
throw new InvalidOperationException("Failed to create system tables.", ex);
}
}
private void ExecuteSql(string sql)
{
_db.ExecuteSql(sql);
}
private void CreatePermissionTypesTable()
{
var sql = $@"
CREATE TABLE IF NOT EXISTS permission_types (
id SERIAL PRIMARY KEY,
name VARCHAR(100) NOT NULL UNIQUE
)";
ExecuteSql(sql);
}
private void CreatePermissionsTable()
{
var sql = $@"
CREATE TABLE IF NOT EXISTS permissions (
id SERIAL PRIMARY KEY,
name VARCHAR(100) NOT NULL UNIQUE,
type_id INTEGER NOT NULL,
FOREIGN KEY (type_id) REFERENCES permission_types(id)
)";
ExecuteSql(sql);
}
private void CreateRolesTable()
{
var sql = $@"
CREATE TABLE IF NOT EXISTS roles (
id SERIAL PRIMARY KEY,
name VARCHAR(100) NOT NULL UNIQUE
)";
ExecuteSql(sql);
}
private void CreateRolePermissionsTable()
{
var sql = $@"
CREATE TABLE IF NOT EXISTS role_permissions (
role_id INTEGER NOT NULL,
permission_id INTEGER NOT NULL,
PRIMARY KEY (role_id, permission_id),
FOREIGN KEY (role_id) REFERENCES roles(id),
FOREIGN KEY (permission_id) REFERENCES permissions(id)
)";
ExecuteSql(sql);
}
}
}

View file

@ -0,0 +1,138 @@
using Insight.Database;
using System.Data;
namespace PlanTempus.Database.Tenants
{
internal class InitializeOrganizationData
{
private readonly IDbConnection _db;
public InitializeOrganizationData(IDbConnection db)
{
_db = db;
}
private async Task InsertInitialData(string schema)
{
// Permission types
var insertPermissionTypes = $@"
INSERT INTO {schema}.permission_types (name) VALUES
('NAVIGATION'),
('COMMAND'),
('VIEW'),
('FEATURE')";
await _db.ExecuteAsync(insertPermissionTypes);
// Permissions
var insertPermissions = $@"
INSERT INTO {schema}.permissions (name, type_id) VALUES
-- Navigation permissions
('OVERVIEW_VIEW', (SELECT id FROM {schema}.permission_types WHERE name = 'NAVIGATION')),
('CALENDAR_VIEW', (SELECT id FROM {schema}.permission_types WHERE name = 'NAVIGATION')),
('SALES_VIEW', (SELECT id FROM {schema}.permission_types WHERE name = 'NAVIGATION')),
('CUSTOMERS_VIEW', (SELECT id FROM {schema}.permission_types WHERE name = 'NAVIGATION')),
-- Command permissions
('CREATE_PRODUCT', (SELECT id FROM {schema}.permission_types WHERE name = 'COMMAND')),
('EDIT_PRODUCT', (SELECT id FROM {schema}.permission_types WHERE name = 'COMMAND')),
('DELETE_PRODUCT', (SELECT id FROM {schema}.permission_types WHERE name = 'COMMAND')),
('CREATE_CUSTOMER', (SELECT id FROM {schema}.permission_types WHERE name = 'COMMAND')),
('EDIT_CUSTOMER', (SELECT id FROM {schema}.permission_types WHERE name = 'COMMAND')),
-- View permissions
('PRODUCT_DETAILS', (SELECT id FROM {schema}.permission_types WHERE name = 'VIEW')),
('CUSTOMER_DETAILS', (SELECT id FROM {schema}.permission_types WHERE name = 'VIEW')),
('SALES_STATISTICS', (SELECT id FROM {schema}.permission_types WHERE name = 'VIEW')),
-- Feature permissions
('ADVANCED_SEARCH', (SELECT id FROM {schema}.permission_types WHERE name = 'FEATURE')),
('EXPORT_DATA', (SELECT id FROM {schema}.permission_types WHERE name = 'FEATURE')),
('BULK_OPERATIONS', (SELECT id FROM {schema}.permission_types WHERE name = 'FEATURE'))";
await _db.ExecuteAsync(insertPermissions);
// Roles
var insertRoles = $@"
INSERT INTO {schema}.roles (name) VALUES
('SYSTEM_ADMIN'),
('TENANT_ADMIN'),
('POWER_USER'),
('BASIC_USER')";
await _db.ExecuteAsync(insertRoles);
// Top-level navigation
var insertTopNav = $@"
INSERT INTO {schema}.navigation_link_templates
(parent_id, url, permission_id, icon, default_order)
VALUES
(NULL, '/overview',
(SELECT id FROM {schema}.permissions WHERE name = 'OVERVIEW_VIEW'),
'home', 10),
(NULL, '/sales',
(SELECT id FROM {schema}.permissions WHERE name = 'SALES_VIEW'),
'shopping-cart', 20),
(NULL, '/customers',
(SELECT id FROM {schema}.permissions WHERE name = 'CUSTOMERS_VIEW'),
'users', 30)";
await _db.ExecuteAsync(insertTopNav);
// Sub-navigation
var insertSubNav = $@"
INSERT INTO {schema}.navigation_link_templates
(parent_id, url, permission_id, icon, default_order)
VALUES
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/sales'),
'/sales/create',
(SELECT id FROM {schema}.permissions WHERE name = 'CREATE_PRODUCT'),
'plus', 1),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/customers'),
'/customers/create',
(SELECT id FROM {schema}.permissions WHERE name = 'CREATE_CUSTOMER'),
'user-plus', 1)";
await _db.ExecuteAsync(insertSubNav);
// Translations for top-level
var insertTopTranslations = $@"
INSERT INTO {schema}.navigation_link_template_translations
(template_id, language, display_name)
VALUES
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/overview'),
'da-DK', 'Overblik'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/overview'),
'en-US', 'Overview'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/sales'),
'da-DK', 'Salg'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/sales'),
'en-US', 'Sales'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/customers'),
'da-DK', 'Kunder'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/customers'),
'en-US', 'Customers')";
await _db.ExecuteAsync(insertTopTranslations);
// Translations for sub-navigation
var insertSubTranslations = $@"
INSERT INTO {schema}.navigation_link_template_translations
(template_id, language, display_name)
VALUES
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/sales/create'),
'da-DK', 'Opret salg'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/sales/create'),
'en-US', 'Create sale'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/customers/create'),
'da-DK', 'Opret kunde'),
((SELECT id FROM {schema}.navigation_link_templates WHERE url = '/customers/create'),
'en-US', 'Create customer')";
await _db.ExecuteAsync(insertSubTranslations);
// Grant the admin role all permissions
var insertAdminPermissions = $@"
INSERT INTO {schema}.role_permissions (role_id, permission_id)
SELECT
(SELECT id FROM {schema}.roles WHERE name = 'SYSTEM_ADMIN'),
id
FROM {schema}.permissions";
await _db.ExecuteAsync(insertAdminPermissions);
}
}
}

344
NAMING_CONVENTION.md Normal file
View file

@ -0,0 +1,344 @@
# SWP.Core Naming Convention
This document defines the official naming conventions for the SWP.Core project, based on an analysis of the existing codebase and .NET best practices.
## General Principles
1. **Consistency**: Follow the same patterns throughout the codebase
2. **Readability**: Names must be self-explanatory and descriptive
3. **Language**: Use English for all code and comments
4. **Avoid abbreviations**: Use full words rather than abbreviations
## 1. Namespace Naming
### Standard Format
```csharp
SWP.Core.[FeatureArea].[SubArea]
```
### Rules
- **PascalCase** for all segments
- Use `SWP.Core` as the root prefix (not `PlanTempus.Core`)
- The hierarchical structure mirrors the folder structure
- At most 4 levels deep, for readability
### Examples
```csharp
// ✅ Correct
SWP.Core.CommandQueries
SWP.Core.Configurations.SmartConfigProvider
SWP.Core.Database.ConnectionFactory
SWP.Core.Entities.Users
SWP.Core.X.TDD.Security
// ❌ Incorrect
PlanTempus.Core.CommandQueries // Wrong prefix
SWP.core.commandqueries // Wrong casing
SWP.Core.Cfg.SmartCfgProv // Abbreviations
```
## 2. Class Naming
### Rules
- **PascalCase** for all classes
- Descriptive names that reflect the class's responsibility
- Suffixes for special types
### Class Type Suffixes
| Type | Suffix | Example |
|------|--------|----------|
| Abstract classes | (none) | `Command` |
| Interfaces | `I` prefix | `ISecureTokenizer` |
| Exceptions | `Exception` | `ConfigurationException` |
| Factories | `Factory` | `PostgresConnectionFactory` |
| Services | `Service` | `UserService` |
| Extensions | `Extensions` | `TelemetryExtensions` |
| Modules (Autofac) | `Module` | `SecurityModule` |
| Configuration | `Configuration` or `Options` | `SeqConfiguration`, `SmartConfigOptions` |
| Tests | `Tests` | `SecureTokenizerTests` |
### Examples
```csharp
// ✅ Correct
public class SecureTokenizer : ISecureTokenizer
public class PostgresConnectionFactory : IDbConnectionFactory
public abstract class Command : ICommand
public class ConfigurationException : Exception
// ❌ Incorrect
public class secureTokenizer // Wrong casing
public class Factory // Too generic
public class SecureTokenizerImpl // Avoid the "Impl" suffix
```
## 3. Interface Naming
### Rules
- **PascalCase** with an `I` prefix
- Describes a capability or contract
- Avoid the `Interface` suffix
### Examples
```csharp
// ✅ Correct
public interface ISecureTokenizer
public interface IDbConnectionFactory
public interface IConfigurationRepository
// ❌ Incorrect
public interface SecureTokenizer // Missing I prefix
public interface ISecureTokenizerInterface // Redundant suffix
```
## 4. Method Naming
### Rules
- **PascalCase** for all methods
- Start with a verb that describes the action
- Async methods must have the `Async` suffix
### Naming Patterns
| Pattern | Example |
|---------|----------|
| Action methods | `TokenizeText()`, `VerifyToken()` |
| Factory methods | `Create()`, `Build()` |
| Async methods | `LogAsync()`, `CreateAsync()` |
| Boolean methods | `IsValid()`, `CanExecute()`, `HasPermission()` |
### Examples
```csharp
// ✅ Correct
public string TokenizeText(string word)
public async Task<bool> VerifyTokenAsync(string hash, string word)
public bool IsValidFormat(string input)
// ❌ Incorrect
public string tokenize_text(string word) // Wrong casing
public async Task<bool> VerifyToken(string hash, string word) // Missing Async suffix
public bool ValidFormat(string input) // Missing verb
```
## 5. Property Naming
### Rules
- **PascalCase** for all properties
- Use nouns or noun phrases
- Auto-properties are recommended
### Examples
```csharp
// ✅ Correct
public Guid CorrelationId { get; set; }
public required string Email { get; set; }
public DateTime CreatedDate { get; set; }
public bool IsActive { get; set; }
// ❌ Incorrect
public Guid correlationId { get; set; } // Wrong casing
public string GetEmail() { get; set; } // Method-style naming
```
## 6. Field Naming
### Private Fields
- **camelCase** with an underscore prefix `_`
- Descriptive names
### Constants
- **PascalCase** for public constants
- **camelCase** with an underscore prefix for private constants
### Readonly Fields
- Same as private fields, with an underscore prefix
### Examples
```csharp
// ✅ Correct
private readonly IHttpClient _httpClient;
private const int _saltSize = 16;
public const string DefaultConnectionString = "...";
private static readonly string _defaultEncoding = "UTF-8";
// ❌ Incorrect
private readonly IHttpClient httpClient; // Missing underscore
private const int SALT_SIZE = 16; // Wrong casing for private
public const string default_connection = "..."; // Wrong casing for public
```
## 7. Parameter Naming
### Rules
- **camelCase** for all parameters
- Descriptive names that reflect the parameter's purpose
- Avoid single-letter names (except type parameters)
### Examples
```csharp
// ✅ Correct
public bool VerifyToken(string hashedToken, string plainTextPassword)
public void Configure(IServiceCollection services, string connectionString)
public async Task<T> ExecuteAsync<T>(CancellationToken cancellationToken)
// ❌ Incorrect
public bool VerifyToken(string s1, string s2) // Not descriptive
public void Configure(string ConnString) // Wrong casing
public async Task<T> ExecuteAsync<T>(CancellationToken ct) // Abbreviation
```
## 8. File and Folder Naming
### Files
- **PascalCase**, matching the main class in the file
- One main class per file (except internal/helper classes)
### Folders
- **PascalCase**, mirroring the namespace structure
- Use plurals for collections of entities (e.g. `Users`, `Configurations`)
### Examples
```
✅ Correct
/Core/CommandQueries/Command.cs
/Core/Entities/Users/User.cs
/Core/Configurations/SmartConfigProvider/SmartConfigProvider.cs
/Tests/Security/SecureTokenizerTests.cs
❌ Wrong
/Core/command-queries/command.cs
/Core/entities/users/user.cs
/Tests/security/secure_tokenizer_tests.cs
```
## 9. Test Naming
### Test Classes
- Class name + `Tests` suffix
- Same namespace as the class under test + `.X.TDD`
### Test Methods
- Format: `[MethodName]_Should[ExpectedBehavior]_[Condition]`
- Or: `[MethodName]_[Condition]_[ExpectedBehavior]`
### Test Projects
- Format: `[ProjectName].X.TDD`
### Examples
```csharp
// ✅ Correct
namespace SWP.Core.X.TDD.Security;
[TestClass]
public class SecureTokenizerTests
{
[TestMethod]
public void TokenizeText_ShouldReturnDifferentTokens_ForSamePassword()
[TestMethod]
public void VerifyToken_WithValidPassword_ShouldReturnTrue()
[TestMethod]
public void VerifyToken_WithNullInput_ShouldThrowException()
}
// ❌ Wrong
public class TestSecureTokenizer // Wrong prefix
public void TokenizeTextTest() // Not descriptive
public void Test1() // Not descriptive
```
## 10. Configuration and Database Naming
### Configuration Keys
- **PascalCase** for each section
- Hierarchical, with colon separators
- Grouped logically
### Database vs C# Mapping
- **Database**: snake_case for columns
- **C#**: PascalCase for properties
- Use mapping where needed
### Examples
```json
// ✅ Correct configuration
{
"Database": {
"ConnectionString": "...",
"CommandTimeout": 30
},
"Logging": {
"Level": "Information",
"Providers": {
"Seq": {
"Url": "http://localhost:5341"
}
}
}
}
```
```sql
-- ✅ Database (snake_case)
CREATE TABLE users (
id INT PRIMARY KEY,
email VARCHAR(255),
password_hash VARCHAR(255),
created_at TIMESTAMP
);
```
```csharp
// ✅ C# Entity (PascalCase)
public class User
{
public int Id { get; set; }
public string Email { get; set; }
public string PasswordHash { get; set; }
public DateTime CreatedAt { get; set; }
}
```
## 11. Special Conventions
### Generic Type Parameters
- Single letters: `T`, `TKey`, `TValue`
- Descriptive when it helps: `TEntity`, `TRequest`, `TResponse`
### Event Handlers
- Format: `On[EventName]` or `Handle[EventName]`
### Extension Methods
- The first parameter must be `this`
- Methods live in static classes with an `Extensions` suffix
### Async/Await
- Always use the `Async` suffix for async methods
- Use `CancellationToken cancellationToken` as the last parameter (see the sketch below)
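A small sketch illustrating these special conventions (the type and member names are hypothetical and not taken from SWP.Core):
```csharp
// Generic type parameters: single letters, or descriptive names such as TEntity.
public interface IRepository<TEntity>
{
    // Async suffix and CancellationToken as the last parameter.
    Task<TEntity?> GetByIdAsync(int id, CancellationToken cancellationToken = default);
}

// Extension methods: static class with the "Extensions" suffix, first parameter marked with "this".
public static class StringExtensions
{
    public static bool IsBlank(this string? value) => string.IsNullOrWhiteSpace(value);
}

// Event handlers: On[EventName] or Handle[EventName].
public class ConfigurationWatcher
{
    private void OnConfigurationChanged(object? sender, EventArgs e)
    {
        // React to the configuration change here.
    }
}
```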
## 12. Exceptions to the Conventions
### Acceptable Abbreviations
- `Id` (rather than Identifier)
- `Url` (rather than UniformResourceLocator)
- `Http` (rather than HyperTextTransferProtocol)
- `Json` (rather than JavaScriptObjectNotation)
- `Sql` (rather than StructuredQueryLanguage)
### Well-Known Patterns
- `DTO` suffix for Data Transfer Objects
- `CRUD` in comments and documentation
- `API` for Application Programming Interface
## Implementation
1. **Gradual migration**: Bring existing code in line incrementally as it is changed
2. **Code reviews**: Enforce the conventions during code reviews
3. **Linting**: Configure analyzers to enforce the conventions
4. **Documentation**: Keep this guide up to date
## Tools
- **EditorConfig**: Configure formatting rules
- **StyleCop**: Enforce naming conventions
- **SonarQube**: Code quality and consistency checks
- **Visual Studio**: Code analysis and suggestions

SWP.Core.sln Normal file

@ -0,0 +1,31 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.14.36202.13
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SWP.Core", "Core\SWP.Core.csproj", "{A89DB90D-5720-8689-A6DD-D077E7D85138}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SWP.Core.X.TDD", "Tests\SWP.Core.X.TDD.csproj", "{76C3FC63-6C63-9B0B-698B-7AB1F08CB2AD}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{A89DB90D-5720-8689-A6DD-D077E7D85138}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A89DB90D-5720-8689-A6DD-D077E7D85138}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A89DB90D-5720-8689-A6DD-D077E7D85138}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A89DB90D-5720-8689-A6DD-D077E7D85138}.Release|Any CPU.Build.0 = Release|Any CPU
{76C3FC63-6C63-9B0B-698B-7AB1F08CB2AD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{76C3FC63-6C63-9B0B-698B-7AB1F08CB2AD}.Debug|Any CPU.Build.0 = Debug|Any CPU
{76C3FC63-6C63-9B0B-698B-7AB1F08CB2AD}.Release|Any CPU.ActiveCfg = Release|Any CPU
{76C3FC63-6C63-9B0B-698B-7AB1F08CB2AD}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {D194DADC-70D5-41AA-86EC-666A1BE4794D}
EndGlobalSection
EndGlobal

TECHNICAL_DOCUMENTATION.md Normal file

@ -0,0 +1,341 @@
# SWP.Core - Technical Documentation
## Overview
SWP.Core is a modular .NET 9.0 enterprise application framework designed for multi-tenant SaaS applications. The system provides a comprehensive foundation for building scalable applications with advanced configuration management, security, telemetry, and database operations.
## Architecture
### High-Level Architecture
The system follows a layered architecture with clear separation of concerns:
```
┌─────────────────────────────────────────┐
│ Application Layer │
├─────────────────────────────────────────┤
│ Core Library │
│ ┌─────────────┬─────────────────────┐ │
│ │ Command/ │ Configuration │ │
│ │ Query │ Management │ │
│ ├─────────────┼─────────────────────┤ │
│ │ Security & │ Telemetry & │ │
│ │ Encryption │ Logging │ │
│ ├─────────────┼─────────────────────┤ │
│ │ Database │ Module Registry │ │
│ │ Operations │ (Autofac) │ │
│ └─────────────┴─────────────────────┘ │
├─────────────────────────────────────────┤
│ Database Layer │
│ ┌─────────────────────────────────────┐ │
│ │ PostgreSQL with Row Level Security │ │
│ │ Multi-tenant Schema Management │ │
│ └─────────────────────────────────────┘ │
└─────────────────────────────────────────┘
```
### Technology Stack
- **Framework**: .NET 9.0
- **Database**: PostgreSQL with Insight.Database
- **DI Container**: Autofac
- **Testing**: MSTest + Shouldly
- **Telemetry**: Application Insights + Seq
- **Security**: Sodium.Core for encryption
- **Configuration**: Custom multi-provider system
## Core Components
### 1. Configuration Management System
**Location**: `Core/Configurations/`
The system implements a sophisticated multi-provider configuration system that supports:
- **JSON Configuration Provider**: File-based configuration
- **Smart Configuration Provider**: Database-backed configuration with caching
- **Hierarchical Configuration**: Nested configuration with path-based access
**Key Classes**:
- [`ConfigurationBuilder`](Core/Configurations/ConfigurationBuilder.cs): Main builder for configuration providers
- [`SmartConfigProvider`](Core/Configurations/SmartConfigProvider/SmartConfigProvider.cs): Database-backed configuration
- [`PostgresConfigurationRepository`](Core/Configurations/SmartConfigProvider/Repositories/PostgresConfigurationRepository.cs): PostgreSQL storage
**Usage Example**:
```csharp
var config = new ConfigurationBuilder()
.AddProvider(new JsonConfigProvider("appsettings.json"))
.AddProvider(new SmartConfigProvider(connectionString))
.Build();
var connectionString = config.GetConnectionString("DefaultConnection");
var feature = config.Get<FeatureConfig>("Feature");
```
### 2. Database Operations
**Location**: `Core/Database/`
Provides a robust database abstraction layer with:
- **Connection Factory Pattern**: [`IDbConnectionFactory`](Core/Database/ConnectionFactory/IDbConnectionFactory.cs)
- **Operation Scoping**: [`DatabaseScope`](Core/Database/DatabaseScope.cs) for transaction and telemetry management
- **Telemetry Integration**: Automatic performance tracking
**Key Classes**:
- [`SqlOperations`](Core/Database/SqlOperations.cs): Main database operations class
- [`PostgresConnectionFactory`](Core/Database/ConnectionFactory/PostgresConnectionFactory.cs): PostgreSQL connection management
**Usage Example**:
```csharp
var result = await _sqlOperations.ExecuteAsync(async conn =>
{
return await conn.QueryAsync<User>("SELECT * FROM users WHERE id = @id", new { id });
}, "GetUserById");
```
### 3. Security & Encryption
**Location**: `Core/MultiKeyEncryption/`, `Core/ISecureTokenizer.cs`
Implements enterprise-grade security features:
- **Multi-Key Encryption**: [`MasterKey`](Core/MultiKeyEncryption/MasterKey.cs) for key management
- **Secure Connection Strings**: [`SecureConnectionString`](Core/MultiKeyEncryption/SecureConnectionString.cs)
- **Token Security**: [`SecureTokenizer`](Core/SecureTokenizer.cs) using Sodium.Core
**Features**:
- SHA-256 based token generation
- Secure password hashing
- Connection string encryption
- Key rotation support
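**Usage Example** (a minimal sketch based on the test suite; the password value is a placeholder):
```csharp
ISecureTokenizer tokenizer = new SecureTokenizer();

// Produce a salted token for a secret
var token = tokenizer.TokenizeText("my-password");

// Later, verify a candidate value against the stored token
var isValid = tokenizer.VerifyToken(token, "my-password"); // true
```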
### 4. Command/Query Pattern
**Location**: `Core/CommandQueries/`
Implements a lightweight command/query pattern without MediatR:
- **Base Command**: [`Command`](Core/CommandQueries/Command.cs) with correlation tracking
- **Command Interface**: [`ICommand`](Core/CommandQueries/ICommand.cs)
- **Response Handling**: [`CommandResponse`](Core/CommandQueries/CommandResponse.cs)
- **Problem Details**: [`ProblemDetails`](Core/CommandQueries/ProblemDetails.cs) for error handling
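**Usage Example** (a hedged sketch; `CreateUserCommand` and its `Email` property are illustrative, while `CorrelationId` comes from the `Command` base class):
```csharp
public class CreateUserCommand : Command
{
    public required string Email { get; set; }
}

// Correlation tracking is carried on the base Command
var command = new CreateUserCommand
{
    CorrelationId = Guid.NewGuid(),
    Email = "user@example.com"
};
```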
### 5. Telemetry & Logging
**Location**: `Core/Telemetry/`, `Core/SeqLogging/`
Comprehensive observability solution:
- **Application Insights Integration**: [`TelemetryExtensions`](Core/Telemetry/TelemetryExtensions.cs)
- **Seq Logging**: [`SeqLogger`](Core/SeqLogging/SeqLogger.cs) for structured logging
- **Custom Telemetry Channel**: [`SeqTelemetryChannel`](Core/Telemetry/SeqTelemetryChannel.cs)
- **Background Processing**: [`SeqBackgroundService`](Core/SeqLogging/SeqBackgroundService.cs)
**Features**:
- Structured logging with correlation IDs
- Performance metrics collection
- Exception tracking with full stack traces
- Custom enrichers for metadata
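**Usage Example** (a sketch mirroring the test suite; `MyService`, the endpoint, and the environment name are placeholders):
```csharp
var config = new SeqConfiguration("http://localhost:5341", null, "Development");
var httpClient = new SeqHttpClient(config);
var logger = new SeqLogger<MyService>(httpClient, config);

var trace = new TraceTelemetry
{
    Message = "Order processed",
    SeverityLevel = SeverityLevel.Information,
    Timestamp = DateTimeOffset.UtcNow
};
trace.Properties.Add("CorrelationId", Guid.NewGuid().ToString());

await logger.LogAsync(trace);
```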
### 6. Module Registry (Dependency Injection)
**Location**: `Core/ModuleRegistry/`
Autofac-based modular dependency injection:
- **Security Module**: [`SecurityModule`](Core/ModuleRegistry/SecurityModule.cs)
- **Telemetry Module**: [`TelemetryModule`](Core/ModuleRegistry/TelemetryModule.cs)
- **Seq Logging Module**: [`SeqLoggingModule`](Core/ModuleRegistry/SeqLoggingModule.cs)
- **Database Module**: [`DbPostgreSqlModule`](Core/Database/ModuleRegistry/DbPostgreSqlModule.cs)
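**Usage Example** (container wiring as done in the test fixture, assuming `configuration` is an `IConfigurationRoot` built as shown in section 1):
```csharp
var builder = new ContainerBuilder();

builder.RegisterModule(new DbPostgreSqlModule
{
    ConnectionString = configuration.GetConnectionString("DefaultConnection")
});
builder.RegisterModule(new TelemetryModule
{
    TelemetryConfig = configuration.GetSection("ApplicationInsights").ToObject<TelemetryConfig>()
});
builder.RegisterModule(new SeqLoggingModule
{
    SeqConfiguration = configuration.GetSection("SeqConfiguration").ToObject<SeqConfiguration>()
});
builder.RegisterModule<SecurityModule>();

var container = builder.Build();
```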
## Database Schema
### Multi-Tenant Architecture
The system implements a sophisticated multi-tenant architecture using PostgreSQL:
**Core Tables** (in identity schema):
- **users**: User authentication and profile data
- **organizations**: Tenant/organization management
- **user_organizations**: Many-to-many relationship with PIN codes
**Security Features**:
- **Row Level Security (RLS)**: Automatic tenant isolation
- **Schema-based Separation**: Each tenant can have dedicated schemas
- **Connection String Encryption**: Secure tenant database connections
**Example Schema Setup**:
```sql
-- From SetupIdentitySystem.cs
CREATE TABLE identity.users (
id SERIAL PRIMARY KEY,
email VARCHAR(256) NOT NULL UNIQUE,
password_hash VARCHAR(256) NOT NULL,
security_stamp VARCHAR(36) NOT NULL,
email_confirmed BOOLEAN NOT NULL DEFAULT FALSE,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- RLS Policy
CREATE POLICY organization_access ON identity.organizations
USING (id IN (
SELECT organization_id
FROM identity.user_organizations
WHERE user_id = current_setting('app.user_id', TRUE)::INTEGER
));
```
## Testing Strategy
**Location**: `Tests/`
Comprehensive testing approach using MSTest + Shouldly:
- **Unit Tests**: Component-level testing
- **Integration Tests**: Database and external service testing
- **Configuration Tests**: Multi-provider configuration validation
**Testing Principles**:
- Clear variable names for debugging
- Shouldly assertions for readable test failures
- Isolated test environments
- Mock-based testing for external dependencies
**Example Test Structure**:
```csharp
[TestMethod]
public void SecureTokenizer_Should_Generate_Valid_Token()
{
// Arrange
var tokenizer = new SecureTokenizer();
var inputText = "test-password";
// Act
var generatedToken = tokenizer.TokenizeText(inputText);
// Assert
generatedToken.ShouldNotBeNullOrEmpty();
generatedToken.Length.ShouldBe(64); // SHA-256 hex length
}
```
## Configuration
### Multi-Provider Configuration System
The system supports multiple configuration sources with hierarchical merging:
1. **JSON Files**: Traditional appsettings.json
2. **Database**: Dynamic configuration stored in PostgreSQL
3. **Environment Variables**: Runtime configuration
4. **Azure App Configuration**: Cloud-based configuration (planned)
**Configuration Structure**:
```json
{
"ConnectionStrings": {
"DefaultConnection": "Host=localhost;Database=app;..."
},
"ApplicationInsights": {
"ConnectionString": "InstrumentationKey=...",
"UseSeqLoggingTelemetryChannel": true
},
"SeqConfiguration": {
"IngestionEndpoint": "http://localhost:5341",
"Environment": "Development"
},
"Feature": {
"Enabled": true,
"RolloutPercentage": 25,
"AllowedUserGroups": ["beta"]
}
}
```
## Deployment & Operations
### Prerequisites
- .NET 9.0 Runtime
- PostgreSQL 12+
- Seq (for logging)
- Application Insights (for telemetry)
### Environment Setup
1. **Database Setup**: Run DDL scripts from `Database/Core/DDL/`
2. **Configuration**: Set up appsettings.json with connection strings
3. **Logging**: Configure Seq endpoint
4. **Telemetry**: Set Application Insights connection string
### Performance Considerations
- **Connection Pooling**: Managed by Npgsql
- **Async Operations**: All database operations are async
- **Telemetry Overhead**: Minimal impact with background processing
- **Configuration Caching**: Smart config provider includes caching
## Security Considerations
### Authentication & Authorization
- **Password Security**: SHA-256 hashing with security stamps
- **Token Management**: Secure token generation and validation
- **Multi-Tenant Isolation**: RLS policies prevent cross-tenant data access
### Data Protection
- **Connection String Encryption**: Sensitive connection data encrypted
- **Audit Trails**: Comprehensive logging of all operations
- **Input Validation**: FluentValidation integration
### Compliance
- **GDPR Ready**: User data management and deletion capabilities
- **Audit Logging**: Complete operation tracking
- **Data Encryption**: At-rest and in-transit encryption
## Extension Points
### Adding New Modules
1. Create a new Autofac module inheriting from `Module`
2. Register services in the `Load()` method
3. Add the module to the container builder (see the sketch below)
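A minimal sketch of such a module (the registered service and interface are hypothetical, not part of SWP.Core):
```csharp
using Autofac;

public class NotificationModule : Module
{
    protected override void Load(ContainerBuilder builder)
    {
        // Register this module's services with the container.
        builder.RegisterType<EmailNotificationService>()
               .As<INotificationService>()
               .InstancePerLifetimeScope();
    }
}

// Wiring it up alongside the built-in modules:
// builder.RegisterModule<NotificationModule>();
```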
### Custom Configuration Providers
1. Implement `IConfigurationProvider`
2. Add to `ConfigurationBuilder`
3. Handle configuration merging strategy
### Custom Telemetry
1. Extend `TelemetryExtensions`
2. Add custom enrichers (see the sketch below)
3. Configure Application Insights processors
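One common way to add an enricher is an Application Insights `ITelemetryInitializer`; the sketch below shows the general pattern and is not necessarily how `TelemetryExtensions` wires enrichers internally:
```csharp
using Microsoft.ApplicationInsights.Channel;
using Microsoft.ApplicationInsights.Extensibility;

public class TenantTelemetryInitializer : ITelemetryInitializer
{
    // Adds a custom property to every telemetry item before it is sent.
    public void Initialize(ITelemetry telemetry)
    {
        telemetry.Context.GlobalProperties["Tenant"] = "tenant-placeholder";
    }
}
```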
## Troubleshooting
### Common Issues
1. **Database Connection**: Check PostgreSQL connection strings and user permissions
2. **Configuration Loading**: Verify JSON syntax and provider order
3. **Telemetry**: Ensure Application Insights and Seq endpoints are accessible
4. **Multi-Tenant**: Verify RLS policies and user context settings
### Debugging
- Enable detailed logging in Seq
- Use Application Insights for performance monitoring
- Check database query performance with PostgreSQL logs
- Validate configuration loading with debug output
## Future Roadmap
- Azure App Configuration integration
- Advanced caching strategies
- GraphQL API support
- Event sourcing capabilities
- Microservices decomposition support

Tests/.runsettings Normal file

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<RunSettings>
<RunConfiguration>
<MaxCpuCount>0</MaxCpuCount>
<ResultsDirectory>.\TestResults</ResultsDirectory>
<TargetFrameworkVersion>net9.0</TargetFrameworkVersion>
</RunConfiguration>
<DataCollectionRunSettings>
<DataCollectors>
<DataCollector friendlyName="Code Coverage" uri="datacollector://Microsoft/CodeCoverage/2.0">
<Configuration>
<CodeCoverage>
<ModulePaths>
<Include>
<ModulePath>.*SWP\.Core\.dll$</ModulePath>
</Include>
<Exclude>
<ModulePath>.*Tests.*</ModulePath>
</Exclude>
</ModulePaths>
<UseVerifiableInstrumentation>True</UseVerifiableInstrumentation>
<AllowLowIntegrityProcesses>True</AllowLowIntegrityProcesses>
<CollectFromChildProcesses>True</CollectFromChildProcesses>
<CollectAspDotNet>False</CollectAspDotNet>
</CodeCoverage>
</Configuration>
</DataCollector>
</DataCollectors>
</DataCollectionRunSettings>
<MSTest>
<MapInconclusiveToFailed>false</MapInconclusiveToFailed>
<CaptureTraceOutput>true</CaptureTraceOutput>
<DeleteDeploymentDirectoryAfterTestRunIsComplete>true</DeleteDeploymentDirectoryAfterTestRunIsComplete>
<DeploymentEnabled>true</DeploymentEnabled>
<Parallelize>
<Workers>0</Workers>
<Scope>MethodLevel</Scope>
</Parallelize>
</MSTest>
</RunSettings>


@ -0,0 +1,40 @@
using Npgsql;
namespace SWP.Core.X.TDD.CodeSnippets;
internal class TestPostgresLISTENNOTIFY
{
private static async Task Main(string[] args)
{
var connectionString = "Host=192.168.1.57;Database=ptdb01;Username=postgres;Password=3911";
try
{
await using var conn = new NpgsqlConnection(connectionString);
await conn.OpenAsync();
Console.WriteLine("Forbundet til databasen. Lytter efter notifikationer...");
conn.Notification += (o, e) =>
{
Console.WriteLine("Notifikation modtaget:");
Console.WriteLine($" PID: {e.PID}");
Console.WriteLine($" Kanal: {e.Channel}");
Console.WriteLine($" Payload: {e.Payload}");
Console.WriteLine("------------------------");
};
await using (var cmd = new NpgsqlCommand("LISTEN config_changes;", conn))
await cmd.ExecuteNonQueryAsync();
Console.WriteLine("Tryk på en tast for at stoppe...");
while (!Console.KeyAvailable) await conn.WaitAsync();
}
catch (Exception ex)
{
Console.WriteLine($"Der opstod en fejl: {ex.Message}");
Console.WriteLine($"Stack trace: {ex.StackTrace}");
}
}
}


@ -0,0 +1,11 @@
INSERT INTO "system".app_configuration ("key",value,"label",content_type,valid_from,expires_at,created_at,modified_at,etag) VALUES
('Email:Templates:Welcome','{"subject":"Velkommen til vores platform","template":"welcome-dk.html","sender":"velkommen@firma.dk"}','test','application/json','2024-01-01 01:00:00+01',NULL,'2025-02-03 16:46:36.665888+01','2025-02-03 16:47:30.528326+01','c48949c4-c02f-4c77-b81c-e281a810def1'::uuid),
('Email:Templates:Password','{"subject":"Nulstil dit kodeord","template":"reset-password-dk.html","sender":"support@firma.dk"}','Email Templates','application/json','2024-01-01 01:00:00+01',NULL,'2025-02-03 16:47:56.537775+01','2025-02-03 16:47:56.537775+01','26500738-4f5b-4cc8-a0e4-2a6a5fd57675'::uuid),
('Debug','true',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','f1348731-9396-4f1d-b40a-7fbd23a897d2'::uuid),
('Database:ConnectionString','"Server=db.example.com;Port=5432"',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','2aa0bc3e-fa24-449a-8f25-a76d9b4d535e'::uuid),
('Database:Timeout','30',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','d25ebb14-49f6-4e33-9ac7-a3253705d0fb'::uuid),
('Database:UseSSL','true',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','f4d52ec4-b723-4561-9b18-0e7a68b89a17'::uuid),
('Logging:FileOptions','{"Path": "/var/logs/app.log", "MaxSizeMB": 100, "RetentionDays": 7}',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','06c0891d-a860-4acc-917a-d0877f511c1b'::uuid),
('Features:Experimental','{"Enabled": true, "RolloutPercentage": 25, "AllowedUserGroups": ["beta"]}',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','0136fdef-51d9-4909-82ef-f72053ce6d6d'::uuid),
('API:Endpoints','"/api/users"',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','fe362b69-a486-48ad-9165-2e623e2e6f70'::uuid),
('API:Endpoints','"/api/products"',NULL,'text/plain',NULL,NULL,'2025-02-02 14:25:22.200058+01','2025-02-02 14:25:22.200058+01','c087e2d4-1f38-4814-b4dd-f30c463dc6d1'::uuid);


@ -0,0 +1,63 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Shouldly;
using SWP.Core.CommandQueries;
namespace SWP.Core.X.TDD.CommandQueries;
[TestClass]
public class CommandTests
{
[TestMethod]
public void Command_ShouldHaveCorrelationId()
{
// Arrange & Act
var correlationId = Guid.NewGuid();
var command = new TestCommand { CorrelationId = correlationId };
// Assert
command.CorrelationId.ShouldBe(correlationId);
}
[TestMethod]
public void Command_ShouldHaveTransactionId()
{
// Arrange & Act
var correlationId = Guid.NewGuid();
var transactionId = Guid.NewGuid();
var command = new TestCommand { CorrelationId = correlationId };
command.TransactionId = transactionId;
// Assert
command.TransactionId.ShouldBe(transactionId);
}
private class TestCommand : Command
{
public string TestProperty { get; set; }
}
}
[TestClass]
public class ProblemDetailsTests
{
[TestMethod]
public void ProblemDetails_ShouldHaveBasicProperties()
{
// Arrange & Act
var problem = new ProblemDetails
{
Type = "ValidationError",
Title = "Validation Failed",
Status = 400,
Detail = "Email is required",
Instance = "/api/users"
};
// Assert
problem.Type.ShouldBe("ValidationError");
problem.Title.ShouldBe("Validation Failed");
problem.Status.ShouldBe(400);
problem.Detail.ShouldBe("Email is required");
problem.Instance.ShouldBe("/api/users");
}
}


@ -0,0 +1,53 @@
using Newtonsoft.Json;
using Shouldly;
using SWP.Core.CommandQueries;
namespace SWP.Core.X.TDD.CommandQueryHandlerTests;
[TestClass]
public class ProblemDetailsTests
{
[TestMethod]
public void TestFormatOfProblemDetails()
{
// Arrange
var problemDetails = new ProblemDetails
{
Type = "https://example.com/errors/invalid-input",
Title = "Invalid Input",
Status = 400,
Detail = "The request body is invalid.",
Instance = "/api/users"
};
problemDetails.AddExtension("invalidFields", new[]
{
new { Field = "name", Message = "The 'name' field is required." },
new { Field = "email", Message = "The 'email' field must be a valid email address." }
});
var json = JsonConvert.SerializeObject(problemDetails, Formatting.Indented);
var expectedJson = """
{
"Type": "https://example.com/errors/invalid-input",
"Title": "Invalid Input",
"Status": 400,
"Detail": "The request body is invalid.",
"Instance": "/api/users",
"invalidFields": [
{
"Field": "name",
"Message": "The 'name' field is required."
},
{
"Field": "email",
"Message": "The 'email' field must be a valid email address."
}
]
}
""";
json.ShouldBe(expectedJson);
}
}


@ -0,0 +1,188 @@
using System.Data;
using Autofac;
using Insight.Database;
using Newtonsoft.Json;
using SWP.Core.Database.ConnectionFactory;
using Shouldly;
namespace SWP.Core.X.TDD.ConfigurationSystem;
[TestClass]
public class SetupConfigurationTests : TestFixture
{
private IDbConnection _connection;
[TestInitialize]
public void Setup()
{
var connectionFactory = Container.Resolve<IDbConnectionFactory>();
_connection = connectionFactory.Create();
}
[TestCleanup]
public void Cleanup()
{
_connection.ExecuteSql(@"
TRUNCATE TABLE app_configuration_history;
TRUNCATE TABLE app_configuration CASCADE;");
_connection.Dispose();
}
[TestMethod]
public void InsertConfiguration_ShouldCreateHistoryRecord()
{
// Arrange
var configData = new
{
key = "test.key",
value = "test value",
label = "Test Label"
};
// Act
var result = _connection.QuerySql<dynamic>(@"
INSERT INTO app_configuration (key, value, label)
VALUES (@key, @value, @label)
RETURNING *", configData).Single();
var history = _connection.QuerySql<dynamic>(@"
SELECT key, value, label, action_type
FROM app_configuration_history
WHERE id = @id AND action_type = 'I'",
new { id = (int)result.id })
.Single();
// Assert
var expected = JsonConvert.SerializeObject(new
{
configData.key,
configData.value,
configData.label,
action_type = "I"
});
var actual = JsonConvert.SerializeObject(history) as string;
actual.ShouldBe(expected);
}
[TestMethod]
public void UpdateConfiguration_ShouldUpdateModifiedAt()
{
// Arrange
var configData = new
{
key = "test.key",
value = "original value"
};
var original = _connection.QuerySql<dynamic>(@"
INSERT INTO app_configuration (key, value)
VALUES (@key, @value)
RETURNING modified_at", configData)
.Single();
Thread.Sleep(1000);
// Act
var updated = _connection.QuerySql<dynamic>(@"
UPDATE app_configuration
SET value = @value
WHERE key = @key
RETURNING modified_at",
new { configData.key, value = "updated value" })
.Single();
// Assert
((DateTime)updated.modified_at).ShouldBeGreaterThan((DateTime)original.modified_at);
}
[TestMethod]
public void DeleteConfiguration_ShouldCreateHistoryRecord()
{
// Arrange
var configData = new
{
key = "test.key",
value = "test value"
};
var original = _connection.QuerySql<dynamic>(@"
INSERT INTO app_configuration (key, value)
VALUES (@key, @value)
RETURNING id", configData)
.Single();
// Act
_connection.ExecuteSql(
"DELETE FROM app_configuration WHERE id = @id",
new { id = (int)original.id });
// Assert
var history = _connection.QuerySql<dynamic>(@"
SELECT key, value, action_type
FROM app_configuration_history
WHERE id = @id AND action_type = 'D'",
new { id = (int)original.id })
.Single();
var expected = JsonConvert.SerializeObject(new
{
configData.key,
configData.value,
action_type = "D"
});
var actual = JsonConvert.SerializeObject(history) as string;
actual.ShouldBe(expected);
}
[TestMethod]
public void InsertConfiguration_ShouldSetAllColumns()
{
// Arrange
var now = DateTime.UtcNow;
now = new DateTime(now.Year, now.Month, now.Day, now.Hour, now.Minute, 0, DateTimeKind.Utc);
var configData = new
{
key = "test.columns",
value = "test value",
label = "Test Label",
content_type = "application/json",
valid_from = now,
expires_at = now.AddDays(30)
};
// Act
var result = _connection.QuerySql<dynamic>(@"
INSERT INTO app_configuration (
key,
value,
label,
content_type,
valid_from,
expires_at)
VALUES (
@key,
@value,
@label,
@content_type,
@valid_from,
@expires_at)
RETURNING key, value, label, content_type,
CAST(EXTRACT(EPOCH FROM date_trunc('minute', valid_from)) AS INTEGER) as valid_from,
CAST(EXTRACT(EPOCH FROM date_trunc('minute', expires_at)) AS INTEGER) as expires_at", configData)
.Single();
// Assert
var expected = JsonConvert.SerializeObject(new
{
configData.key,
configData.value,
configData.label,
configData.content_type,
valid_from = ((DateTimeOffset)configData.valid_from).ToUnixTimeSeconds(),
expires_at = ((DateTimeOffset)configData.expires_at).ToUnixTimeSeconds()
});
Assert.AreEqual(expected, JsonConvert.SerializeObject(result));
}
}


@ -0,0 +1,145 @@
using Newtonsoft.Json.Linq;
using SWP.Core.Configurations;
using SWP.Core.Configurations.JsonConfigProvider;
using SWP.Core.Configurations.SmartConfigProvider;
using Shouldly;
using SWP.Core.X.TDD;
namespace SWP.Core.X.TDD.ConfigurationTests;
[TestClass]
public class JsonConfigurationProviderTests : TestFixture
{
private const string _testFolder = "ConfigurationTests/";
public JsonConfigurationProviderTests() : base(_testFolder)
{
}
[TestMethod]
public void GetSection_ShouldReturnCorrectFeatureSection()
{
// Arrange
var expectedJObject = JObject.Parse(@"{
'Enabled': true,
'RolloutPercentage': 25,
'AllowedUserGroups': ['beta']
}") as JToken;
var builder = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.Build();
// Act
var section = builder.GetSection("Feature");
// Assert
section.ShouldNotBeNull();
section.Value.ShouldBeEquivalentTo(expectedJObject);
}
[TestMethod]
public void Get_ShouldReturnCorrectFeatureObject()
{
// Arrange
var expectedFeature = new Feature
{
Enabled = true,
RolloutPercentage = 25,
AllowedUserGroups = new List<string> { "beta" }
};
var builder = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.Build();
// Act
var actualFeature = builder.GetSection("Feature").ToObject<Feature>();
#pragma warning disable CS0618 // Type or member is obsolete
var actualFeatureObsoleted = builder.GetSection("Feature").Get<Feature>();
#pragma warning restore CS0618 // Type or member is obsolete
// Assert
actualFeature.ShouldBeEquivalentTo(expectedFeature);
actualFeatureObsoleted.ShouldBeEquivalentTo(expectedFeature);
}
[TestMethod]
public void Get_ShouldReturnCorrectValueAsString()
{
// Arrange
var expectedFeature = "123";
var builder = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.Build();
// Act
var actualFeature = builder.GetSection("AnotherSetting").Get<string>("Thresholds:High");
// Assert
actualFeature.ShouldBeEquivalentTo(expectedFeature);
}
/// <summary>
/// Testing the indexer kept for compatibility with Microsoft ConfigurationBuilder
/// </summary>
[TestMethod]
public void Indexer_ShouldReturnValueAsString()
{
// Arrange
var expected = "SHA256";
var builder = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.Build();
// Act
var actual = builder["Authentication"];
// Assert
actual.ShouldBeEquivalentTo(expected);
}
[TestMethod]
public void Get_ShouldReturnCorrectValueAsInt()
{
// Arrange
var expectedFeature = 22;
var builder = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.Build();
// Act
var actualFeature = builder.GetSection("AnotherSetting:Temperature").Get<int>("Indoor:Max:Limit");
// Assert
actualFeature.ShouldBe(expectedFeature);
}
[TestMethod]
public void Get_ShouldReturnCorrectValueAsBool()
{
// Arrange
var expectedFeature = true;
var configRoot = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.AddSmartConfig()
.Build();
// Act
var actualFeature = configRoot.Get<bool>("Database:UseSSL");
// Assert
actualFeature.ShouldBe(expectedFeature);
}
}
internal class Feature
{
public bool Enabled { get; set; }
public int RolloutPercentage { get; set; }
public List<string> AllowedUserGroups { get; set; }
}


@ -0,0 +1,75 @@
using Newtonsoft.Json.Linq;
using SWP.Core.Configurations.Common;
namespace SWP.Core.X.TDD.ConfigurationTests;
[TestClass]
public class ConfigurationTests : TestFixture
{
[TestInitialize]
public void Init()
{
}
[TestMethod]
public void ConfigurationSettingsTest()
{
var pairs = new List<KeyValuePair<string, JToken>>
{
new("Debug", true),
// Database configuration
new("Database:ConnectionString", "Server=db.example.com;Port=5432"),
new("Database:Timeout", 30),
new("Database:UseSSL", true),
// Logging configuration with a JObject
new("Logging:FileOptions", JObject.Parse(@"{
'Path': '/var/logs/app.log',
'MaxSizeMB': 100,
'RetentionDays': 7
}")),
// Feature flags with a complex configuration
new("Features:Experimental", JObject.Parse(@"{
'Enabled': true,
'RolloutPercentage': 25,
'AllowedUserGroups': ['beta']
}")),
// API endpoints as an array
new("API:Endpoints", "/api/users"),
new("API:Endpoints", "/api/products")
};
var result = KeyValueToJson.Convert(pairs);
var expected = JObject.Parse(@"{
'Debug' : true,
'Database': {
'ConnectionString': 'Server=db.example.com;Port=5432',
'Timeout': 30,
'UseSSL': true
},
'Logging': {
'FileOptions': {
'Path': '/var/logs/app.log',
'MaxSizeMB': 100,
'RetentionDays': 7
}
},
'Features': {
'Experimental': {
'Enabled': true,
'RolloutPercentage': 25,
'AllowedUserGroups': ['beta']
}
},
'API': {
'Endpoints': ['/api/users', '/api/products']
}
}");
Assert.IsTrue(JToken.DeepEquals(expected, result));
}
}


@ -0,0 +1,82 @@
using Autofac;
using Insight.Database;
using SWP.Core.Configurations;
using SWP.Core.Configurations.JsonConfigProvider;
using SWP.Core.Configurations.SmartConfigProvider;
using SWP.Core.Database.ConnectionFactory;
using Shouldly;
namespace SWP.Core.X.TDD.ConfigurationTests;
[TestClass]
public class SmartConfigProviderTests : TestFixture
{
private const string _testFolder = "ConfigurationTests/";
[TestMethod]
public void TrySmartConfigWithOptionsForPostgres()
{
var config = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.AddSmartConfig(options => options.UsePostgres("DefaultConnection"))
.Build();
var actualFeature = config.Get<bool>("Database:UseSSL");
}
[TestMethod]
public void Get_ShouldReturnCorrectValueAsBool()
{
// Arrange
var expectedFeature = true;
var config = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.AddSmartConfig(options => options.UsePostgres("DefaultConnection"))
.Build();
// Act
var actualFeature = config.Get<bool>("Database:UseSSL");
// Assert
actualFeature.ShouldBe(expectedFeature);
}
[TestMethod]
public void Get_ShouldReturnCorrectValueWhenSelectingIntoValueRowInConfigTable()
{
// Arrange
var expectedFeature = 100;
var builder = new ConfigurationBuilder()
.AddJsonFile($"{_testFolder}appconfiguration.dev.json")
.AddSmartConfig(options => options.UsePostgres("DefaultConnection"))
.Build();
// Act
var actualFeature = builder.GetSection("Logging:FileOptions").Get<int>("MaxSizeMB");
var withoutSectionThisAlsoWorks = builder.Get<int>("Logging:FileOptions:MaxSizeMB");
// Assert
actualFeature.ShouldBe(expectedFeature);
actualFeature.ShouldBe(withoutSectionThisAlsoWorks);
}
[TestMethod]
public void TryGetActiveConfigurations()
{
var connFactory = Container.Resolve<IDbConnectionFactory>();
const string sql = @"
SELECT id, ""key"", value, label, content_type,
valid_from, expires_at, created_at, modified_at, etag
FROM app_configuration
WHERE CURRENT_TIMESTAMP BETWEEN valid_from AND expires_at
OR (valid_from IS NULL AND expires_at IS NULL)";
using (var conn = connFactory.Create())
{
var result = conn.QuerySql(sql);
}
}
}


@ -0,0 +1,74 @@
{
"AllowedHosts": "*",
"ConnectionStrings": {
"DefaultConnection": "Host=192.168.1.57;Port=5432;Database=sandbox;User Id=sathumper;Password=3911;"
},
"ApplicationInsights": {
"ConnectionString": "InstrumentationKey=07d2a2b9-5e8e-4924-836e-264f8438f6c5;IngestionEndpoint=https://northeurope-2.in.applicationinsights.azure.com/;LiveEndpoint=https://northeurope.livediagnostics.monitor.azure.com/;ApplicationId=56748c39-2fa3-4880-a1e2-24068e791548",
"UseSeqLoggingTelemetryChannel": true
},
"SeqConfiguration": {
"IngestionEndpoint": "http://localhost:5341",
"ApiKey": null,
"Environment": "MSTEST"
},
"Authentication": "SHA256",
"Feature": {
"Enabled": true,
"RolloutPercentage": 25,
"AllowedUserGroups": [
"beta"
]
},
"AnotherSetting": {
"Thresholds": {
"High": "123",
"Low": "-1"
},
"Temperature": {
"Indoor": {
"Max": {
"Limit": 22
},
"Min": {
"Limit": 18
}
},
"Outdoor": {
"Max": {
"Limit": 12
},
"Min": {
"Limit": 9
}
}
}
},
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{
"Name": "Seq",
"Args": {
"serverUrl": "http://localhost:5341",
"apiKey": ""
}
}
],
"Enrich": [
"WithMachineName",
"WithThreadId",
"WithProcessId",
"WithEnvironmentName"
],
"Properties": {
"Application": "PlanTempus"
}
}
}


@ -0,0 +1,83 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Shouldly;
using SWP.Core.Entities.Users;
using SWP.Core.X.TDD.TestHelpers;
namespace SWP.Core.X.TDD.Entities;
[TestClass]
public class UserTests
{
[TestMethod]
public void User_ShouldHaveBasicProperties()
{
// Arrange & Act
var user = new User
{
Id = 1,
Email = "test@example.com",
PasswordHash = "hashedPassword",
SecurityStamp = "securityStamp",
EmailConfirmed = true,
CreatedDate = DateTime.UtcNow
};
// Assert
user.Id.ShouldBe(1);
user.Email.ShouldBe("test@example.com");
user.PasswordHash.ShouldBe("hashedPassword");
user.SecurityStamp.ShouldBe("securityStamp");
user.EmailConfirmed.ShouldBeTrue();
user.CreatedDate.ShouldBeInRange(DateTime.UtcNow.AddMinutes(-1), DateTime.UtcNow.AddMinutes(1));
}
[TestMethod]
public void TestDataBuilder_ShouldCreateValidUser()
{
// Act
var user = TestDataBuilder.Users.CreateTestUser();
// Assert
user.ShouldNotBeNull();
user.Email.ShouldNotBeNullOrEmpty();
user.Email.ShouldContain("@example.com");
user.CreatedDate.ShouldBeInRange(DateTime.UtcNow.AddMinutes(-1), DateTime.UtcNow.AddMinutes(1));
}
[TestMethod]
public void TestDataBuilder_ShouldCreateUserWithCustomEmail()
{
// Arrange
var customEmail = "custom@test.com";
// Act
var user = TestDataBuilder.Users.CreateTestUser(customEmail);
// Assert
user.Email.ShouldBe(customEmail);
}
}
[TestClass]
public class OrganizationTests
{
[TestMethod]
public void Organization_ShouldHaveBasicProperties()
{
// Arrange & Act
var org = new Organization
{
Id = 1,
ConnectionString = "test connection",
CreatedDate = DateTime.UtcNow,
CreatedBy = 1,
IsActive = true
};
// Assert
org.Id.ShouldBe(1);
org.ConnectionString.ShouldBe("test connection");
org.CreatedBy.ShouldBe(1);
org.IsActive.ShouldBeTrue();
}
}


@ -0,0 +1,71 @@
using System.Net;
using Autofac;
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.Channel;
using Microsoft.ApplicationInsights.DataContracts;
using SWP.Core.SeqLogging;
using SWP.Core.Telemetry;
namespace SWP.Core.X.TDD.Logging;
[TestClass]
public class SeqBackgroundServiceTest : TestFixture
{
private CancellationTokenSource _cts;
private IMessageChannel<ITelemetry> _messageChannel;
private SeqBackgroundService _service;
[TestInitialize]
public void SetupThis()
{
_messageChannel = new MessageChannel();
var telemetryClient = Container.Resolve<TelemetryClient>();
var config = new SeqConfiguration("http://localhost:5341", null, "MSTEST");
var httpClient = new SeqHttpClient(config);
var logger = new SeqLogger<SeqBackgroundService>(httpClient, config);
_service = new SeqBackgroundService(telemetryClient, _messageChannel, logger);
_cts = new CancellationTokenSource();
}
[TestMethod]
public async Task Messages_ShouldBeProcessedFromQueue()
{
await _service.StartAsync(_cts.Token);
for (var i = 0; i < 5; i++)
{
var eventTelemetry = new EventTelemetry
{
Name = "Test Event",
Timestamp = DateTimeOffset.UtcNow
};
eventTelemetry.Properties.Add("TestId", Guid.NewGuid().ToString());
eventTelemetry.Metrics.Add("TestMetric", 42.0);
await _messageChannel.Writer.WriteAsync(eventTelemetry);
}
// wait for processing
await Task.Delay(5000);
_cts.Cancel(); //not sure about this, we need to analyse more before this is "the way"
await _service.StopAsync(CancellationToken.None);
var hasMoreMessages = await _messageChannel.Reader.WaitToReadAsync();
Assert.IsFalse(hasMoreMessages, "Queue should be empty after 5 seconds");
}
private class TestMessageHandler : HttpMessageHandler
{
protected override Task<HttpResponseMessage> SendAsync(
HttpRequestMessage request,
CancellationToken cancellationToken)
{
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK));
}
}
}


@ -0,0 +1,145 @@
using Autofac;
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.DataContracts;
using SWP.Core.SeqLogging;
namespace SWP.Core.X.TDD.Logging;
[TestClass]
public class SeqLoggerTests : TestFixture
{
private readonly string _testId;
private readonly SeqHttpClient _httpClient;
private readonly SeqLogger<SeqLoggerTests> _logger;
public SeqLoggerTests()
{
_testId = Guid.NewGuid().ToString();
var config = new SeqConfiguration("http://localhost:5341", null, "MSTEST");
_httpClient = new SeqHttpClient(config);
_logger = new SeqLogger<SeqLoggerTests>(_httpClient, config);
}
[TestMethod]
public async Task LogTraceTelemetry_SendsCorrectDataWithErrorLevel()
{
// Arrange
var traceTelemetry = new TraceTelemetry
{
Message = "Test trace error message",
SeverityLevel = SeverityLevel.Error,
Timestamp = DateTimeOffset.UtcNow
};
traceTelemetry.Properties.Add("TestId", _testId);
// Act
await _logger.LogAsync(traceTelemetry);
}
[TestMethod]
public async Task LogTraceTelemetry_SendsCorrectDataWithWarningLevel()
{
// Arrange
var traceTelemetry = new TraceTelemetry
{
Message = "Test trace warning message",
SeverityLevel = SeverityLevel.Warning,
Timestamp = DateTimeOffset.UtcNow
};
traceTelemetry.Properties.Add("TestId", _testId);
// Act
await _logger.LogAsync(traceTelemetry);
}
[TestMethod]
public async Task LogEventTelemetry_SendsCorrectData()
{
// Arrange
var eventTelemetry = new EventTelemetry
{
Name = "Test Event",
Timestamp = DateTimeOffset.UtcNow
};
eventTelemetry.Properties.Add("TestId", _testId);
eventTelemetry.Metrics.Add("TestMetric", 42.0);
// Act
await _logger.LogAsync(eventTelemetry);
}
[TestMethod]
public async Task LogExceptionTelemetry_SendsCorrectData()
{
try
{
var t = 0;
var result = 10 / t;
}
catch (Exception e)
{
// Arrange
var exceptionTelemetry = new ExceptionTelemetry(e)
{
Timestamp = DateTimeOffset.UtcNow
};
exceptionTelemetry.Properties.Add("TestId", _testId);
// Act
await _logger.LogAsync(exceptionTelemetry);
}
}
[TestMethod]
public async Task LogDependencyTelemetry_SendsCorrectData()
{
// Arrange
var dependencyTelemetry = new DependencyTelemetry
{
Name = "SQL Query",
Type = "SQL",
Target = "TestDB",
Success = true,
Duration = TimeSpan.FromMilliseconds(100),
Timestamp = DateTimeOffset.UtcNow
};
dependencyTelemetry.Properties.Add("TestId", _testId);
// Act
await _logger.LogAsync(dependencyTelemetry);
}
/// <summary>
/// This is for scope test in SeqLogger. It is not testing anything related to the TelemetryChannel which logs to Seq.
/// </summary>
/// <returns></returns>
[TestMethod]
public async Task LogRequestTelemetryInOperationHolderWithParentChild_SendsCorrectData()
{
var telemetryClient = Container.Resolve<TelemetryClient>();
using (var parent = telemetryClient.StartOperation<RequestTelemetry>("Parent First"))
{
parent.Telemetry.Duration = TimeSpan.FromMilliseconds(250);
parent.Telemetry.Url = new Uri("http://parent.test.com/api/test");
using (var child = telemetryClient.StartOperation<RequestTelemetry>("Child 1"))
{
child.Telemetry.Success = true;
child.Telemetry.ResponseCode = "200";
child.Telemetry.Duration = TimeSpan.FromMilliseconds(50);
child.Telemetry.Url = new Uri("http://child.test.com/api/test");
child.Telemetry.Timestamp = DateTimeOffset.UtcNow;
child.Telemetry.Properties.Add("httpMethod", HttpMethod.Get.ToString());
child.Telemetry.Properties.Add("TestId", _testId);
await _logger.LogAsync(child);
}
await _logger.LogAsync(parent);
}
}
}


@ -0,0 +1,60 @@
using Autofac;
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.Channel;
using Microsoft.ApplicationInsights.DataContracts;
using SWP.Core.SeqLogging;
using SWP.Core.Telemetry;
namespace SWP.Core.X.TDD.Logging;
[TestClass]
public class SeqTelemetryChannelTest : TestFixture
{
private CancellationTokenSource _cts;
private IMessageChannel<ITelemetry> _messageChannel;
private SeqBackgroundService _service;
private TelemetryClient _telemetryClient;
[TestInitialize]
public void SetupThis()
{
//it is important to use the same MessageChannel as the BackgroundService uses
//we know that IMessageChannel<ITelemetry> _messageChannel; is registered via Autofac and manually injected into SeqBackgroundService
//so we can get it by calling the Autofac Container in this test.
_messageChannel = Container.Resolve<IMessageChannel<ITelemetry>>();
_service = Container.Resolve<SeqBackgroundService>();
_telemetryClient = Container.Resolve<TelemetryClient>();
_cts = new CancellationTokenSource();
}
[TestMethod]
public async Task Messages_ShouldBeProcessedFromQueue()
{
await _service.StartAsync(_cts.Token);
for (var i = 0; i < 5; i++)
{
var eventTelemetry = new EventTelemetry
{
Name = "Test Event 3",
Timestamp = DateTimeOffset.UtcNow
};
eventTelemetry.Properties.Add("TestId", Guid.NewGuid().ToString());
eventTelemetry.Metrics.Add("TestMetric", 42.0);
//we don't write to the _messageChannel.Writer.WriteAsync(eventTelemetry);, but the TelemetryClient which is configured to use SeqTelemetryChannel
_telemetryClient.TrackEvent(eventTelemetry);
}
// wait for processing
await Task.Delay(5000);
await _service.StopAsync(CancellationToken.None);
var hasMoreMessages = await _messageChannel.Reader.WaitToReadAsync();
Assert.IsFalse(hasMoreMessages, "Queue should be empty after 5 seconds");
}
}


@ -0,0 +1,86 @@
using System.Diagnostics;
using System.Text;
using Sodium;
namespace SWP.Core.X.TDD;
[TestClass]
public class PasswordHasherTests : TestFixture
{
[TestMethod]
public void MyTestMethod()
{
var stopwatch = Stopwatch.StartNew();
var salt = PasswordHash.ScryptGenerateSalt();
// 2. Convert the password to byte[]
var passwordBytes = Encoding.UTF8.GetBytes("password123");
// 3. Call ScryptHashBinary correctly
var hash = PasswordHash.ScryptHashBinary(
passwordBytes,
salt
);
stopwatch.Stop();
}
[TestMethod]
public void HashPassword_ShouldCreateValidHashFormat()
{
// Arrange
var password = "TestPassword123";
// Act
var hashedPassword = new SecureTokenizer().TokenizeText(password);
var parts = hashedPassword.Split('.');
// Assert
Assert.AreEqual(3, parts.Length);
Assert.AreEqual("100000", parts[0]);
}
[TestMethod]
public void VerifyPassword_WithCorrectPassword_ShouldReturnTrue()
{
// Arrange
var password = "TestPassword123";
var hashedPassword = new SecureTokenizer().TokenizeText(password);
// Act
var result = new SecureTokenizer().VerifyToken(hashedPassword, password);
// Assert
Assert.IsTrue(result);
}
[TestMethod]
public void VerifyPassword_WithWrongPassword_ShouldReturnFalse()
{
// Arrange
var correctPassword = "TestPassword123";
var wrongPassword = "WrongPassword123";
var hashedPassword = new SecureTokenizer().TokenizeText(correctPassword);
// Act
var result = new SecureTokenizer().VerifyToken(hashedPassword, wrongPassword);
// Assert
Assert.IsFalse(result);
}
[TestMethod]
public void VerifyPassword_WithInvalidHashFormat_ShouldReturnFalse()
{
// Arrange
var password = "TestPassword123";
var invalidHash = "InvalidHash";
// Act
var result = new SecureTokenizer().VerifyToken(invalidHash, password);
// Assert
Assert.IsFalse(result);
}
}

Tests/PostgresTests.cs Normal file

@ -0,0 +1,80 @@
using Autofac;
using Insight.Database;
using Shouldly;
using SWP.Core.Database;
using SWP.Core.Database.ConnectionFactory;
namespace SWP.Core.X.TDD;
[TestClass]
public class PostgresTests : TestFixture
{
private IDbConnectionFactory _connFactory;
private IDatabaseOperations _databaseOperations;
[TestInitialize]
public void MyTestMethod()
{
_connFactory = Container.Resolve<IDbConnectionFactory>();
_databaseOperations = Container.Resolve<IDatabaseOperations>();
}
[TestMethod]
public void TestDefaultConnection()
{
//https://stackoverflow.com/questions/69169247/how-to-create-idbconnection-factory-using-autofac-for-dapper
using (var conn = _connFactory.Create())
conn.ExecuteSql("SELECT 1 as p");
}
[TestMethod]
public async Task TestScopeConnectionWithLogging()
{
using var db = _databaseOperations.CreateScope(nameof(TestScopeConnectionWithLogging));
try
{
var user = await db.Connection.QuerySqlAsync<string>(
"SELECT tablename FROM pg_tables limit 5");
}
catch (Exception ex)
{
db.Error(ex);
throw;
}
}
[TestMethod]
public async Task TestScopeConnectionWithErrorLogging()
{
using var db = _databaseOperations.CreateScope(nameof(TestScopeConnectionWithErrorLogging));
try
{
var user = await db.Connection.QuerySqlAsync<string>(
"SELECT tablename FROM pg_tables limit 5");
}
catch (Exception ex)
{
db.Error(ex);
}
}
[TestMethod]
public async Task TestSimpleDatabaseOperation()
{
try
{
await _databaseOperations.ExecuteAsync(async connection =>
{
return await connection.QuerySqlAsync<string>(
"SELECT tablename FROM pg_tables limit 5");
}, nameof(TestSimpleDatabaseOperation));
}
catch (Exception)
{
throw;
}
}
}


@ -0,0 +1,40 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" />
<PackageReference Include="moq" Version="4.20.72" />
<PackageReference Include="MSTest.TestAdapter" Version="3.1.1" />
<PackageReference Include="MSTest.TestFramework" Version="3.1.1" />
<PackageReference Include="Shouldly" Version="4.3.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Core\SWP.Core.csproj" />
</ItemGroup>
<ItemGroup>
<Using Include="Microsoft.VisualStudio.TestTools.UnitTesting" />
</ItemGroup>
<ItemGroup>
<None Update="appconfiguration.dev.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="ConfigurationTests\appconfiguration.dev.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>


@ -0,0 +1 @@
namespace SWP.Core.X.TDD;


@ -0,0 +1,86 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Shouldly;
using SWP.Core;
namespace SWP.Core.X.TDD.Security;
[TestClass]
public class SecureTokenizerTests
{
private ISecureTokenizer _tokenizer;
[TestInitialize]
public void Setup()
{
_tokenizer = new SecureTokenizer();
}
[TestMethod]
public void TokenizeText_ShouldReturnNonEmptyString()
{
// Act
var token = _tokenizer.TokenizeText("testPassword");
// Assert
token.ShouldNotBeNullOrEmpty();
}
[TestMethod]
public void TokenizeText_ShouldReturnDifferentTokensForSamePassword()
{
// Arrange
var password = "testPassword";
// Act
var token1 = _tokenizer.TokenizeText(password);
var token2 = _tokenizer.TokenizeText(password);
// Assert
token1.ShouldNotBe(token2);
}
[TestMethod]
public void VerifyToken_ShouldReturnTrueForValidPassword()
{
// Arrange
var password = "testPassword";
var token = _tokenizer.TokenizeText(password);
// Act
var result = _tokenizer.VerifyToken(token, password);
// Assert
result.ShouldBeTrue();
}
[TestMethod]
public void VerifyToken_ShouldReturnFalseForInvalidPassword()
{
// Arrange
var password = "testPassword";
var token = _tokenizer.TokenizeText(password);
// Act
var result = _tokenizer.VerifyToken(token, "wrongPassword");
// Assert
result.ShouldBeFalse();
}
[TestMethod]
public void VerifyToken_ShouldReturnFalseForMalformedToken()
{
// Act & Assert
_tokenizer.VerifyToken("invalid.token", "password").ShouldBeFalse();
_tokenizer.VerifyToken("", "password").ShouldBeFalse();
}
[TestMethod]
public void VerifyToken_ShouldHandleNullInputs()
{
// Act & Assert
Should.Throw<NullReferenceException>(() => _tokenizer.VerifyToken(null, "password"));
// Note: Current implementation doesn't handle null inputs gracefully
// This should be fixed in production code
}
}

Tests/TestFixture.cs Normal file

@ -0,0 +1,87 @@
using System.Diagnostics;
using Autofac;
using Microsoft.ApplicationInsights;
using Microsoft.Extensions.Logging;
using SWP.Core.Configurations;
using SWP.Core.Configurations.JsonConfigProvider;
using SWP.Core.Database.ModuleRegistry;
using SWP.Core.ModuleRegistry;
using SWP.Core.SeqLogging;
namespace SWP.Core.X.TDD;
/// <summary>
/// Act as base class for tests. Avoids duplication of test setup code
/// </summary>
[TestClass]
public abstract class TestFixture
{
private readonly string _configurationFilePath;
protected TestFixture() : this(null)
{
}
public TestFixture(string configurationFilePath)
{
if (configurationFilePath is not null)
_configurationFilePath = configurationFilePath?.TrimEnd('/') + "/";
CreateContainerBuilder();
Container = ContainerBuilder.Build();
}
protected IContainer Container { get; private set; }
protected ContainerBuilder ContainerBuilder { get; private set; }
public virtual IConfigurationRoot Configuration()
{
var configuration = new ConfigurationBuilder()
.AddJsonFile($"{_configurationFilePath}appconfiguration.dev.json")
.Build();
return configuration;
}
protected virtual void CreateContainerBuilder()
{
var configuration = Configuration();
var builder = new ContainerBuilder();
builder.RegisterGeneric(typeof(Logger<>))
.As(typeof(ILogger<>))
.SingleInstance();
builder.RegisterModule(new DbPostgreSqlModule
{
ConnectionString = configuration.GetConnectionString("DefaultConnection")
});
builder.RegisterModule(new TelemetryModule
{
TelemetryConfig = configuration.GetSection("ApplicationInsights").ToObject<TelemetryConfig>()
});
builder.RegisterModule(new SeqLoggingModule
{
SeqConfiguration = configuration.GetSection("SeqConfiguration").ToObject<SeqConfiguration>()
});
builder.RegisterModule<SecurityModule>();
ContainerBuilder = builder;
}
[TestCleanup]
public void CleanUp()
{
Trace.Flush();
var telemetryClient = Container.Resolve<TelemetryClient>();
telemetryClient.Flush();
if (Container is null) return;
Container.Dispose();
Container = null;
}
}


@ -0,0 +1,33 @@
using Microsoft.Extensions.Configuration;
using SWP.Core.Entities.Users;
namespace SWP.Core.X.TDD.TestHelpers;
public static class TestDataBuilder
{
public static class Users
{
public static User CreateTestUser(string email = null)
{
return new User
{
Id = new Random().Next(1, 1000),
Email = email ?? $"test{Guid.NewGuid()}@example.com",
EmailConfirmed = false,
CreatedDate = DateTime.UtcNow
};
}
}
public static class Configuration
{
public static Dictionary<string, string> CreateTestConfiguration()
{
return new Dictionary<string, string>
{
["Database:ConnectionString"] = "Host=localhost;Database=test",
["Logging:Level"] = "Debug"
};
}
}
}


@ -0,0 +1,14 @@
{
"ConnectionStrings": {
"DefaultConnection": "Host=192.168.1.57;Port=5432;Database=ptmain;User Id=sathumper;Password=3911;"
},
"ApplicationInsights": {
"ConnectionString": "InstrumentationKey=07d2a2b9-5e8e-4924-836e-264f8438f6c5;IngestionEndpoint=https://northeurope-2.in.applicationinsights.azure.com/;LiveEndpoint=https://northeurope.livediagnostics.monitor.azure.com/;ApplicationId=56748c39-2fa3-4880-a1e2-24068e791548",
"UseSeqLoggingTelemetryChannel": true
},
"SeqConfiguration": {
"IngestionEndpoint": "http://localhost:5341",
"ApiKey": null,
"Environment": "MSTEST"
}
}

build.sh Normal file

@ -0,0 +1,39 @@
#!/bin/bash
# Build script for SWP.Core
# This script assumes .NET 9.0 SDK is installed
set -e
echo "Building SWP.Core Solution..."
# Check if dotnet is available
if ! command -v dotnet &> /dev/null; then
echo "Error: .NET SDK is not installed or not in PATH"
echo "Please install .NET 9.0 SDK from https://dotnet.microsoft.com/download"
exit 1
fi
# Check .NET version
DOTNET_VERSION=$(dotnet --version)
echo "Using .NET version: $DOTNET_VERSION"
# Restore packages
echo "Restoring NuGet packages..."
dotnet restore
# Build solution
echo "Building solution in Release mode..."
dotnet build --configuration Release --no-restore
# Run unit tests
echo "Running unit tests..."
dotnet test --configuration Release --no-build --filter "TestCategory!=Integration" --logger "console;verbosity=normal"
# Run integration tests (if database is available)
if [ "$RUN_INTEGRATION_TESTS" = "true" ]; then
echo "Running integration tests..."
dotnet test --configuration Release --no-build --filter "TestCategory=Integration" --logger "console;verbosity=normal"
fi
echo "Build completed successfully!"